[Binary artifact: tar archive of Zuul CI job output. Recoverable metadata only — paths var/home/core/zuul-output/ and var/home/core/zuul-output/logs/kubelet.log.gz (gzip-compressed kubelet log), owner core. The remaining content is compressed binary data and is not recoverable as text.]
G~_|o?:%KܥiMB x Ȭ߹?_CYgP9/ m0{EPV/[뇥nb7Y +Xy0aZx k=7-n>xC99-8k<_s9~ۗ.sDdHcV"86Bmwk/ Fn?Pr!}dΈ}f1ȵGJR:0K4"u2ZdV6fLTfnin>׉;&:jtqK}6㸥j TJr0^POL1 I41 mF.6d{yp%` G\k98r=V!W4Ua궵K!N'V%0(h,2Dz Q2FE Q@`T8bC}5*OA`oS>w> z,Wv;T1uhmmNw~:6ûRdE''> WՍ(xׯQg Ŧ:=vY[iTGTXNPu4S݁#2 0 Fz@ZreL%s##A&%^zKr{LTSX狮6pK 4* IpF, o#Ąk:lYD2%Fz!Z;^Z_qtDjgﺟ5<'+Oͺm=coP8POQl`| H,~sP`#征7Z!7eFSܩ_,-9锗9pY2 H%MǃsRZD[BVHHɸa]4K|Sf`*CV2pg2^JSkSUg-+jhOL?m`xXyP#{mχ`U-N)|4y$Ʌ:ļq=n13ԅ#ţϣK7Y S!Z6wV!Tfl%j=/N~ahe4h5:o5 *eCX <#S% ?9 J0!eePh8UOHUI8t^yhx镀Oܹ7w%Xy>OJ}7!vE2" n }sԫq6Y-Oũx0K%*ɬX圡C,!@u%$+COg QcZs.UU^پY`I^xvY56EgQn+ey$RH='^ b33)4%|ӛz{K&~_=|(";|](J@ |4h'U7-io4A # ]o8H_ɻ5m2~7)Yb)82` /LIv  p@l:6~O69q^MoNȦ7'a;TB2r0ѿ2Y0&if<BdZj츍̘`$jp;2l`z\謏@FrDlTgE0˧dlBHKyx%:Mƛܮ|?ivE Bm+V4X4W?z?L~nf[TC'~o.JG!*;5@v<|-SW ]u/hXAgb&HV)RBK1-Q8zBZy =o-f"WZղm-k[ֲm-k[KӲ5䖵emkӿemkYZֶemkYZֶemkYZֶemkYZֶemkYZֶemkYZֶd 6FxtnӍwN71AL ;gnӍwN7;xtnӍwYZjFm9n&d9n4Ǎ7FshU.O%Ya?9*PGdI92^89zFL PKd.NATvb/ee&yK &İI*,K1ΙI"))5Z+_H^vƽYޛh5Y΋PFTx2d1\ԅZ#eS $G}i.,vyE_RI7A$۠,# ļ<ٮ;-]թhjcWh+kD4bӈ]ʸR7heM6yJE)Q L'Om$A0MDg|@RI1- -iZ8 ȧځٮ'jzqLf:͒duM/6t_idD=hfH6Y5+9Ue(ڐnzqzd~j͎Sчl7} [Ce*ȭgq ~]Ŕk\s>ޏ8=+~on.3$) GoII&47Srǒ6_R+b (_#CB p$:3d;Oя,A'C7HDZ)@ |0yi P|Yri}ދ_4r{cYږ4]4xˠNFtۛpCnm$ٜ7N1ye9oO;x+,zxnxyE %R@}E_c ?"l-,l7|_q g1 [6S >/G3W]d9f4ngSsY4dBS[BƏ_p%9LBAJ$^cQ,g>Yk'2XL5t%9P@T;}Fr6jboay? V4Hy95hô !Qd $`}/ [ ~;x~dTx4gA(iA.rmU rҎ3)$cЁ:ҁɲjH[sGxwSu+JcCGGBGր{CG+r_8JY@J U bk-Y@5ptdpu21ƴx# c>ڒ3 f`S GX ӪhoKGg;8B8*1U 6s$gzϔϣ 0GsDYb@),Ѽ/syryɡdeF.f˲ 7hPΤ;}>}*ksq|=aٻ: "m[ϯ^ Euȭ+@hsեk3~bO㭇X`4\=v`Eн7fW!$XrNj7u5?^,3j^*y> ͟nN&gŮ,Y[*. ByŰ65͟\6oCCwR]ck,f6PV*#>A4"`UK-1ԏKZAԿ,e;w0K#l4`wS4f.39IgI{8+ awR݇aq'!)qM ;}/&ٲ(`KU;Ä'VF8{n119Z&-j8VؔDe9?tcLy#U]k, uxudRPMH.|hkD3\ o8F*D3d E"$Z C98ʍ$x@B9J'X `A B#>v;$z ߜ4J?Aw-,Ë -7C̕-e<1ZqeH 3yf`WV+5 x<DO9HMOTQRJ% HI}$ "1T mS*<>HN$OR׍'XT:u%ǣz^(1 T1j7#종cOԁd F ;a}^ ̺Z4l[ !)CwRɮgvidGRB<1pf;ŸZXqԧrk *XRd6`\:CHǕ62Lb|s1M-)%`w[ (5) (b0I@ZIi!<ЂkגkI%v`dQkͳK3%k>{>9G|vQ{JS}IeM}f -6ە!T!4x;+V-{mzh]Oz_#Вjm\`ڪ7@+].+@zdj[ eSm7vݾ)=yoз8<_nR-PtNr Ni 1^_~[#%gU*j:JXKoZҥ~\l^%5n>ܒ_mKѼ_VÅ;pqFxu_TA?O@G@Wa4> >4bnҘ8SZl7 h 0FMԇJgחH-ghHR3%T7[n]lL;Zl .c'TK;yjC@(L00|Q:[`@4:B$*20ws93'O9yvЁ<~ћ(FR+,31aqm5YjAgPfvG5NXuzD.^0^(:vFKVںz#ξorg|d{/9ߺ <Κ6mu!g^h rc먋֡0*O#VtkNt(jR䐤2 #520rJ!,E N&x;>Yw'kןSc/*H'eoO$=31l)c]Cܒ2uICTil+*Ep(䬈R.vNy `M`8M8%LJ;f2Rʃp-zqy1tN+7U}9Ҩ4raaY[2$&Îc&!PFBd)/#lEee_Px"ICk}8. åh2Xbɀ"]. ^;/9ԏr-cс /q^ݘ"sc@tƬ 6YIb|?8xCY_XSOp>;S<@$|%R 3|'$kBH[0 <y,![n5 2eH2TdXahT.EEENвWT'"(|D*,pE\%h)wqdq kL1@&p;qr@I1;(4VFNJwpq8Ξ_VٔD`銜uvsIg jɼD@E.Ekbq`Q`k1Rr$QHg02# -F\V%P's!/ 鈨3G䈶`˘0 ~qhw\֛Zq\x$Ƚ^ 11j@"zH=g;-+t.ËbI$"U:ڌio:J>. ai'MK{~3չE<KIo \qa'7k~cnN ̅ybNOfX:Ύv-ijdG~Z[e(ԛ\f/Gf|[>=e|c8buOn f\CM4 <-χ[^ =1'՜uٟ AxΝ60FѐS F*J8$0ԅEɈL6{+{&u5sTxG4:&(f,4(:$:l: A~/ܦXfl*2!zfmUZ9LD3&~zB&h#k,Ɔڬ2  HXo  0%{ #H0!EFI)#ȁem-tåb h|L=l 9_!Ƭ7(_gw}F_~O^_zw(8w?KARr帤'P[l=7ClQߦz2S]%*S,D"gf3_U*g^ݮb=T F)KE}CIRjSzrQX;fQ$Kq1bfzߧژh6쒪Rʒ9Ju:szZV&*ªTRu @bNOg ǦiQc% ]@y]6 J p9q {OSX(0nf؞y4M* >j3I"Yc}fڟdP$Mf5XV~Y2kE]^"NC]kb*a _WRkQPh- sHnCUGtzz2>Y|S{8ǮU+\%'|GqhR5T[4E͗I1w~CgU ݥo(dkʿ~}v4V^1DT "o lTN<)Yـ Bg᧳}Ta Xϟ/!Ȯ|IK{ bUWuw>sNRxlkg`7h}OD?X[U>悟7]=ІAF=iʱIM7^\>dL9HbOYZ i͙y,3M #G;ӚbpV} fP^K"u2f.t-RgUi@J 2V(N:meV4-[{W&vPUإ;ݶYnXw{>ulJa^iGjd~sVk/PXv-(1 ĀШmzP`*ש3?Ul|rD nCl<)0&.톟@PhY5,r!] %n~| ? 
ԗ:Ni2/Er+Rc?;.>SfjPlj+=TRQ\2ʮ>3<&\ H3;@i0˓jδTX]Ksr+TG7$uSMV.h$^K)i I5$E%¶{ uARbSIspvGةF]}_s*N`s aXVF XdϑH6r2kK[N :EP@.Z2P7@嚶#]& W#}c\3vt7v>ˀG}q;|[1v<[VhML} r̺ѩWh_&)"#R(Z(Ut9W5୔lJpז 0ޓ\Y,bMm!iR&n݊$r<нw+_uDݮ1weF%JQ#e٫n;q7NURκتc:>X7 Ѧф< ܄4ggB32!YJc͞Ќ*l:Y'%sJU_IcR(R>ICFq]} X Jc"` l26cg?]s8y|lTZdDgsCuK{i$D *)+ƭJ>䲰52E.KMQ͙r6Xj́9:H!x *"%lXj"i:s`N}F0I2߽I+GAFI AD hqhg$gsSM5^ o4,njie)e6TeXH)[j۫  j3v jnΰsLi^n_ծR}rdEMoJJGۼӻ@>Va˷;ɻ6n\=Czk?M秹 >s?()$5VL*D2bN%bVM5qB58UO5ئR;FTXI֌ٯaf8VƺЎ:_7%"u:Z_c4m碵ckݨGkǀT$T\),ԵH.S$CFjcR >,h, Z樳e]Fea.s!(ID]ufׇQ_'57h:9l}cG8jĝ%m/Q2d[=@b'1.I(-{ضQwH lg-5+kLU )Dd#.FXMVB)_3[_#yJlQՋ(F8ŝ}3)z0,mZY˛ HHZ(AXȎzzl~Zsч8}xvx0PoI؟?k\X(7F?M# k866_WOY;(|oFw#B `g:G(>(ϼڳ <fx5j@UV04"Y/BGED%LA,!,``2O2Ka2E7\_ۇՓy/決 ^E3Lm 3@<9@.dG]00[-h\F@QTx[gi+1V(NDZ&G%[Ruym$c 'QPaB:pyqHx`J Ci

AMo4 87{V -eTtw߭C˚9ǖ5jY3>@ߚ(*q}=?1!-`gi*U~)s*.ɨ;Xz#cù6`@I)r.e1A 2bҋZƔ,R{v^2*p֩O~ƞfŏcv)|8vyLdmQۇU[@h7J[|f6O, GG󯣻y9=Dv^O ȎGk@NvBa~⫎qP{?gOw7|!{["p|3>Ƭaoku\Ø׼ ^/cuhmns+~]K?W֞V.\iak( "}mdKx;*}F yE0:[Gjg_>J{>+84~ ;RtږHEa)Q*9-ɄDL^"HKiF(Gfka ޏwq +.wriօr pCphc, Ft@b (lA=jhWxT bxŐBA5R*' qk`YTENVQ0^~~h0 )i y_#xu첒{Z;xa++`;R T6gc$c`F%w{vTA*|3F>"oW^<GA!ԍx~2!OS n>xֶMR ǮX&IX_w ]ͮ'#0@$[R|r5x[G>q7ì!BPܞ VB8p-ݡԠ{ 6@vT  {^r[y9X/q.(X)L՘,l1 .QŜ3蒳*hG^{KzOsqfAy6th(W˘L'wqd.F{a6YuviG H(FDew_o_y[-o8ۺKJm_b96> ³U\}QP`FxѦ}GU6[^Z2kR̥s.,E$pSPg^eV+ϨʬnTeB(GAUXJP1ɰ(:&J>u,!,$0 T*y'y"Z))^[˘$9g*9l U >קgtٽL"4񗰘}ǡLJ|;jRGeEHK#G>M'M @6dޕ6$BӮ``x c l "##%ZjQC"nCJVEVe|ET:M5X5my DV\[3:&A2)},9lJ3qd7)I6cZ a₶9IbTD(*:HM&|[+tK]P+6XXP%I|H‡D, XF:,)w֫~atsvC1i#eNL0ꙌEaLѣ(`bLQttlJX{ ]{ӥFJ*UkA;u܁Nxנt Gu/9;' \vUBН^lyoә4 RK؍3ډ/^>·)^ui_n=y|+Q|>w^ѣGOK Hk4BgtI< z /EK b$,,I)S22h(PDTKCL:$mH )\b{ۄ1&N.rut Z=7|3K:ݾ\`~ 4Pǵ4{lhYȋlXQ3뤋Ek%gσ6Q{t}+";oy07 ;,7my%F+='MGO|twL}b!(ȿUwի_iSeeRztF_r d{M\^_ukCnz%e9Ӛ.?Zt0h:<9[T!r2RRƐTEYl]2#MIflwg/Bx6=P D I# 'ˠ5zBv:8)!xJ&ydSE%#|T"$k@H/(H&x-sFdsu6g+Yv}iQ}8աT~hNӫx,J~}cxM^Ƣu^y%X)*o)[++ ΨBhkP A{h)?ڄB FI !%,&rR1 RbSvi( R8[$7*Ͱ }c,:u]ˏ4lßll<&[J !j/,+&$Rzdš g*%6klb- <1%tJJ l3} DEH&T:8;M-PPCcj vmɏdHr E2%S Y(&F>kMHqJĜFk:epQ:E*c)\lF5yxLxː Ǯ#bq@u+^X\uӉ}`c4K]:dY` eDܳm وmcFKK=ۓgRN 9rk lI+5xiP%l]8[ ĸ8[d8$_gU#..:5E+\pqwf=lC^6PJFJkGFɈ?D~}|i:nxhh<#7z\Hܬ%4_)iڗ^C\~pN71ڍ;S| Eb#3c;Qz60B'҆(2D d UQ`j"{0)c,B#S"PA&c\IICdRSfJ:3BFyc-C)mF"UdX1 h$ "yuCfL-! N\Rc=ct|!1ήӒ Y=ߟ~Ik~]&?OJ{#.Y/zɜ,Y(YGڄ{cwsݟv FGG>.޵W;c2cn-; 7X܇ȿ8=;/?0oPg4* {#{KWjqz-j-Ƹ<-?pKVPZ;k/K.,S!t5KKX,y-gjVnJ6; IWZDu@V>:Bg yP%!kQKmUOl>(mԿe+]{ׁ/2-͚õg-"^wR`̽1z he22z L^&C/7B}h{@WT}ڀx\(.Kd|)7DXԜ=5_)ªwdi-2eqI$Ml G#߮,5"!N\RsD/֩OOOXz. ^_]čkEMNT"a?X*[Mytǣ|_|ʷF_dg7ᾙ6k~HrX1"aŀl*! cx&˓<@J[CZj[- ^xi@*}E%"K1e)ؚ و/A/'qg8] X-uAjԨڙJZh30F:?8pG_kf)~cy-vf r#] 3/xrKۂiۉNb=Wog-QV`>ګT60nS<0,>qIݤN9,CL{cF2*(qe f':\%{ _<%/] t}liy{K8hܕ=t.O 4xܞ^ ܳHeJȊU9]1y͗"sgpeO^;m( *D 3 WtDPMeRLH1.jOo *QGZ &فEo/"g?Wq|>IybƄ.NGc :2$q֚B4uRhDD<zM@@,݌.?^Jfkcnqr{ֆs8nIC&+eBׯ(y&='Iъݼ%ssB]F*q @H ]չV^KTK7luNT)V(L mvlu' 9fPcI(HHL$i`jkV*93&%fw& AX8YJ6LUߦ2M5)T%#4KM>NqUm=~5KoBKT(Nh/a^)Rbn}1 0YGw" |;/H|m<5ShIkK.W>ʒȦ4g˴ׇ4EqMI˯: e)0LJe-8"FQ13oERlr0Z|ຝ*\ $P($O*G">$eTĚZlR谤Yͮq؍IǤ]O#ɒWJ|ySt~n·Q ^flj*/ ]H}JgE8\9ɂU!Av3uFyf"F` hL :#F }׉%toVƠ#s@5]on 8%_TKzv ƺ2g) \'#oϛ2:P%2=v> #4Z. dQ)jNQP΀3gؙ> ]덴dYnh"THJXbk q(I ʎXSZB:A )R' 5`!۶hĘEQr9@d-hO@} iǺv{.f֍"h! M|N9˥Ak9e(ZU֑ Mw;3辙AqIDB,8cO A R&(Kp@@F+8K zmԎHlm oD2!6!#bISѽP5Gׯwu~ɯ^w'9ߊ qeJL[=?]fP>F%yY٪l0xPW:R:۳}7], 8)U5 3qx2Ub9rXnjz||fK,|7Իc_Lj7Jrx[Q@P~+c%Tgfh22q043p2 #q)Á+k莲E-=GҺF)Kプ42 *?f5e7v%iƆŸz$n23򛊖iد܏ScYD`jp\.-fп6$Kf[6|sa~RtTaW˫W\T9ߨyfJ=kQ< EI^"Szۈ(x~ts?o/H?|p>Կ49G<Gqx|۷LwP%R՝0 ͼ3Yd>I5>>kX,R3ڬ hw|fv0FpU`jin&gc뷻~B!VD<1+ö()!!Bp.1F*WLJS p*4wS[e&VY}L ?BEq8ɕ>՗-}6%%Ӿ( c`Iv3vX|rƎ)8n9(V/FGivL!U $OI-3R W` OFT!M~OчS/</ï#X_9 0>\-p~Un xuZ c䋊2iQtV|V7șQ ~l ~PA ""eUl6Mom#W=iqrɋG%i7Q$]_uGlק<-?x6},Memo[yo_(䧤^ P.,{kΙܚ .DzJ-@ LyiR-,-#T'tsSQw*Pn :(A +\; D锨ăCx ZHp{ bMrpQ7 }|ח 7bLۼR i;L 3@[z?zU-Hv/o Gb2_x0qJy-uu8n;*a4cT)ZTk6D+t"Ϥi&T&H[ XS<nrv3s!(\Xύ$oJʘT&H@xF6^Ѐ?BSt`$"OuII2tsKSv fl[S1$MPP1*&Pk(eL6$6Tujn5v?OW#NF50f?%#혍@ k":Ѭ=FR|6Sr7TduHE4R9XIL.Ry,U1%Tv]Y q `0N˂ʦϞ-u qNQi+NܑfI%uͲedUBru[@Cr62@TܫЩ[R!rTHG* Ң>*lj."z52h*I gVI@Z x5x.X&-Xi#1HZ6ry zf"qn7f+Mƣ4odD ~D!X iD"#F9jȹ}~ ;$WVLߏZv#Gb%=Ĵ 虝E%I|5爷*!NTm (,Ma(=? 
f޷8h2⨪Urjbt޻ߚ<7fWЭȵ_$(H9w%X#KcC*1 ewnX^9mۃNr|`z?/P"MGuCY.G*9[M6|ұhJ`R!d.!Q:El":&#*JwkZQ+CF\Tt1HKcwT;vTs|.yK/f*qQAw΋ 7F'I$ȜgN6ړddu= iQN*P(9F RѶ<"g$AvrlY m #AsaLMLdR2 -xu)۫/|Sϥ"pi1Wv6IEpNKmIʄ=ɰ9S<(Ywz؅8z:Ou>WpUVc@LdmBJM"Pe-TxJ*W]1*L w‚5Wu@v3uFyf"F` hL :#ƺvgvkߵFCiƠqۣ=́L Nvw5.£ga }qւ 0(<*ּK\K75.b1c;d|hG Pf7RCޢR*<͞#$7kw6S>ŧaCZ9r;jM$eqJ9>r#][rE͸z#Ly!AAF}o`1hhGEvTFEoC4 ]j.꾦V>i:ؐ M]UM՜G eJNM9[aJBPhc"1MP )K$)P5rL[#gCZ- =`V2^}]*`4YŽ|AS%_GxYfb>3"l5]7]Un)mmLJF9".W6]^?>ec[Wϭ˺q:5N۬=z6e-fn zkz?T<]my=?3k:ng\ucn+lӭ9w66O~VKjP%:|G6ԟWkqQz=Sʘ/SjGZ iijϚ Mgv݊wO`!9\=0 W(8TB<kuJ*ry?eFPz&kBӠ[گ{U>I įM˥_ty֖UP%$hiP8bIksj@)$1 . * IHm N0@0doaAsa͎@ v2i, iU0PKP 9438ȜIa!XslԶKkl]Ij)//"y%xp[R7lޞll^bPOKTf'a/%|,o/QMG@bigWuvg[8E_-Yhdn?ru'P[\|6'2^!n~hΞZ \4ÿzfO^QhڀArE밓q#Wp0ixubIއ{@}pvK Ws.7"gٱc㡟u& a8N7?8k"p ]_,e 8SZ%EHT#1P[ſ`squnѫD(=bE_ ^߻,Q")5s)=Kzަx4N؝{rYp):k{zz<痴BU6oۥدf79ww L٨ m5K}zB"̅3;5J JJ7X4I9 gTMX`4(/qΗ|o{2.z|⋥>L}38OVt|ݹQ-wU/`4@4wZNo kg[g]qOG p4`>41IMt($dmALeo`A&MRR/̗ |F35L)2e;gǫ7IpS{<#HJGaAt@Y vFG\Re4PZXGPc G߀}bۙ1bFA5Zak4(R B<32>2@VB/KY9{J]MxcƔ66eO3݊?7P>kF\xc9E)@߶'{szMgTigL!>/Ӧd2Ôz̨TTEL3\FCw箪p`*sQ ?&TqPjS0^pSZ!9ދaHTFbF0y;c>o pX.ۦӿ4kNc)Ȅe෎ 6ۼL''˫K%ثEdt6j1|YñBMD3 z6g|":6?nYhXT`#Nj['LI8!9"a)ydX0)QZe2#MČ(;-(%:cV3q޷S8dӨ"&(c5J|@WïJ[z+yߝ7=ܓN+rbk?~^Q~^Miڍ"xI؇/>@I6/n\g3xa(vpw;?q@#=Qn9ݏJ=U% a]TdTR@Vg;!I\Br6J½E+i:믴mԃ<A7[ @lhݤ_'E1_=]ۗ'XNwDWmxfxj{(^{ۘN" h;FMJyƤ^{7 Edzc򻥎1g]:n0Qԍ~ɹ`1X9_](ܣY[FH@?,tJP5a{8fSaqQf˵^ >:39Ȁ1T)b+t@Hm2Z{R(ʍ)mw=M0E" h6;?~aCLyCh'>TQQ> \9xv^ Y6}.Ѷ2{du5J Q4 E戢.9f)ͱ%ނmR6J[ p? AX`RM Vs3*wTq <) Qyi%2`,i7L\ W=3MStg̵x)seXӣAԝ)۾bP:4lf OuFys&f^+kS1k"QK5fϨjgNj%#gjopeWVٓ C|gVa0e*RYK-c13Qelx LqˀY,C2N,Vj6O,8k7A0ɵ r B($RMf4j|5Sr@iZ+IoM4/'`4^^Oxԅo}3X 4`oޞŜPwՄ$OT=s8%qó-o*J(5"Xb-vpe;2F3 7JxB` G+g3#dDRau 2NͺJ9DŽRn&x9|)J#"( -J)#RHDcYo ]HC;}dɦt`6⛾ՖE`y-dKaij ~!0I1!HAk4VKD&ȽxpXz# J46+ܙ'ܫU-fQFhu*KfzGѭ"_|ZEYdEpR4!֧ S,1a8q c8iYڛeYWӘ[HD`(3*h m6*R,%! j`QEL+x!z 㲼{v}*:EQ+ZrO%gLe%:cK}L.ɠBRK [@} xgAR'}BHTBS! THB( H\P6P1hxhbP +y̝v (kPƽUMf/h @wq- r 8 SKC4u{{c}\:?8yx0Vfr21-^^e_(U?Jφ9lPix`,(U2K)`)0ǟz]c+8>*Y~&{ABS9(YM&*,i\PZS./TY=y ؠr@-K(L5rSlNν:riD~(.y.uI[t't$fl`{]KuK./{*=K:g~ Lsx{IWa*(SZ$/Îv)7IQ4'?$?F9 _tϿ>}wҭ2krzY#f?=Ӓo,gaX~,IQI)%3ZX&LhR47c<,Z4~ѧدBOk|ZKخ0..f*Q )ւ]_)wysQlr '9P\I)4`++6t9,Iŏ[m&0Éي2bcbh.5+A)q2Gp%^$SRSE.y x$ )[X1c@6$5VHKDlld`>N@YOƽ{eM(cNkCdAsa 㸤ByA(,{h?! b+ jxT~8!,,I(uaa 5+P_ZPTyA^'[^vdQx}$'aKU;p(Iun<9̰v*+{kޔlf޾Xd]"t#$!HK(g.Ĵg#chRZ*`-J%R>RL@M KX)͋ ^`A ƜB*dFzɘd8c[Y(3B1Eo넌7+w_;2pM,Ak9#9pB5JS8,3FᅘJ=e>ٍ QjƧPD 1 "n휚Bc Gfb.Vlp@bJcF*]tZ m\5D8hJE|cѶdǙTRre-EOlݸj9{lsxW1 x9]'SZ%"鱸H_pqx < °*J 4P*F#Xՙ"xBnQ^p1pOaxz~˫~Q#w>Ϲ5{<zj>NkfneKL}V4Ze4c(PìDJM T6!I9N!Zj7/-}-?Nj61S6~c]cqR!rj.޲kSҠ s*l8 Kw!QV5 y)rH9ldǔ't|SͲ8-Fbt2/^m{3ڵ֨_{"h;QSo,Gͫxu0dQR ) c-07YӢu޼WE >磋0w}gqw%Zz:(w_?z'̬?)7#R忼>òZstm:׶Mh_3BkHNampfmWjm=]_q鹟 ?GOˤ Qn0g+{YT`%/=p.\a|>-+ay.pk-=yξን<#W_! ʓn/p-•xO;NCm\>yb\Iۦ'B4n%GUJ./p|eջiKXU/{xߘ{Ix\P\+Mz~C[v v

7xMdn~ A } ns{_$[=>ڣ ŗUK{5,w`> "%g6 Q;$5>nO}m[iem\?U gW'Wҽ7Ww>R;TyGi||r >`nen3:nwg6ǫAgO]4v 0x2]^]r1w}h1cgoޚ`:2ܹ6ORzVNx9x.+>Z(~@(Ԧ*׀plX>! @rkv|ZpL> 5c'znğ/욯RyB,cdx÷ Pq5[o`K$]LҜcRɲL欣uT׻.^$(qAw:.߮Ldz_nuM:+t\- H!N rPfm/ &䴯VDÅ:fkU, ͐!)5'C-5Qc>asB Ir7Zz[8v`k ~SPXj(u/J\1*LdվjR-$B"c{. ،ahl976eZI\srQLrr-3CE6$NE ΀ jb!#)E4P&RTa2!0A+&YE6ּD'13e7{F&PPŏ$鐕6,F2kX(O@ɐ"OɅx<Nbe gUo3[SslJ̱6!9UQJr ^5f\ :EtB0aב}ڏ45x7AkNԱZ} qu~ցAۈ iY9k% 4X)PRF* CaRR цSTq"4k;.f:ÇΨL?f27F LEc5VC=n%Ġ:*D{I.%醺0i^&F2UszrrNjS1٭P{Ӡ n<a<^jvl7[S259Cà=MPAA.7RaLvIΡ8()9 KJAT VdC "Z@*+>CD\ Q=&! co7k-ʺ@J2@k. %T0KէUT "( :RDB@BY3Z*k @IvQ(&_EP9CB+|&LBaG䋾~ڕRYsɂuQم*8+H'! (}F;r&^Xuiћ7Zp 咑|!Bw x n|j!0px u`yP[W%%˱--*h`1yLbǨ 2o, ėj,$tJ yPK!2QӊC5Y d\^¼N0GcqTI}DPVx PQ*^f,T eSUߨ }#U¶BM]GD!v "}X@wv潼󨓹HS־,K4SDS1h+CuD!  Z%! paZt ((BL(b,=JPAbu Ta:ࡃk^BP!)KI`s)f,6Ao0H_,⪯[7 tLNBi)OP  7 EL-By?ytSQA>&qj %bl!'r(e[J&?XWHzD%P h2˱oPJqMSVT! jP[A AӺ%PP_j[ݫ+0LCTj^`4EN 2۽9x,\-*tez- D.nnhfѣAPEm3MwGb1jpk̦k>hr8%EGK64N"<5?z(-Pk8nHJxKTrX*1j=#F@ MJT+!m t0X'R t&*M=<雜z-Tت0-T( C˅[QW kW!tr5pgC~{B%# U C {SCaLE#HRuHȮ #1h @UG sOcK6*\1mBՀk327)7HZ Bժ&>$[EGg@&C?PٲOOkyϲըcנ|+`oQ!@j+v' }5Aiyc Vb!H ΀z"2~_H-&pJ7a"Y zq4fc'Ĕ-*5X.C,2cR`}SYƛH0y{F8lIҶU~DwW,"DH=+e$G]| .?_=OѪ}Jǧ{¦t/,\Pu#>u:*#`Se̵Spq-T+*XE *#Xe`2UFV*#Xe`2UFV*#Xe`2UFV*#Xe`2UFV*#Xe`2UFV*#Xe`2UF^odB |@v=JQ @@@K T@B%*P J T@B%*P J T@B%*P J T@B%*P J T@B%*P J T@B%*^))ƞk͹( *V2TJ%x@B%*P J T@B%*P J T@B%*P J T@B%*P J T@B%*P J T@B%*P z@9?'%(f<%X{lU J !E%kTI)L T@B%*P J T@B%*P J T@B%*P J T@B%*P J T@B%*P J T@B%kR=LzCjkAI5-rr}P wH)y0L Z9 #\sw A3QgDՊ U+*z.TXթSbHվ Vֈ}}8‘ /Z*pXkG[)Npbվp5kpuУ7BS2K5B5}zڸrUg/;)={9冉@`h 'ӦfNYQZ1ڡo/¯ej)Р(ݽ3v臟Wtp𿷃^m};pD3{`8I*kH,٭%YTM?_s2 z?ط/@5yіA(0 m?nnjΛB{Xz텍Z}Yb$VVKQLT⽜-׏zޥwguxlŲ&!Nxc1h 6Q&eFx#F[*Ui9J!IɤBl5oʜDQeU\@l奷ݬ`^'soHAψ6Os9bOK;ub_[{ U1אs+zpU<7WWճO+0X2{6pUgîR:\+k+Ab bSpJ z;7n4.n __Wo/wnԅ| [Gee68L<1.zO' Ͱ7 \ VT? ,Apx}?__63l|߷p ˋ]i:{D?N5|W7q, \ӞF2qEFSf:lsicmN2˥SAp+K 'j퓂ݻ}>}}ѩ~[(L&7pK|jkǟ'37ٻMǿ6㺻n~bin~ۋe*h~-N(~j_Yuk]1率wQ#|Q1j> mûAy~WfI^H. xWA+X31 9?x]a릖{Q4 ؽm򚔜6%CJ)CGc@pKDY*GHE͜;T눓H 1N3:헛lb-s Tٰ€50"\E嵋n#o8@ pUt&Ю,aEeH: 7FA Oσ_"f?װnDY$"WhGכܣ|4lEQk,nhvz"z!%4]z Ӛ k*WakUm,!TיUĄP I(ejeˌ9uGQ u;-'\mY[&)[]י:W 9+bh*xeZs Օ{X2S1AB4\QiYQK -1Cd% \Xnrsտ-]U &*nW/*U\[0v}U+aj$LϙTm|?mIXt+댩J"MqWB #aE^2\SU2'hǽ“yȴU)(vZJ~9V6g7[/⻖FRAz~9RlK8  䈣:?_-4ظ8";z)vdgT LMbRXɩW/V._Qzz7ڍ+OJ.jDbJ8+0gmBv(Rvd odK| /i ;at׿IRЇBY0Cu!0c9E%)!^Lzb!{?ܤɥ>asusiD8]/{ϳ]g8EӌA$Y-wr^1ж# !#mpXdq^FRqxw(j09$Lՙ¤2B;NxM Uk#~:WM72C$Qq0R0nUm-ҥɫe9cDF#, Pzj,v.#`(䬈R.vZe MspJϧLª1jveZ&R5fY5^oC&I }bi3  Κ(Z%&1v5 2"Kΰat&||CI;Uu\iy̱#2\*밊a0-P~B6xјS2kKc5WErU0F>un$\ljfbt0j鈡b0mypX?o>dI>ЕοUh+yp=VLnC 8kMο`JicT'cF*=9XÝ+?ڸd"w]1]_2d 'C%Ԧ\9OQ2S꽈AdBFbF0uؙybYhFnd<HF, *L{0(EɈLX⽕JA }:UTxGDJNc,{̰d2CRVAyj}6\Xd}y?v'yYlb0fw-;w^*FP}S\PJ2ww坯;_/gq:\~smnΝI-ҋ/0} !?GARoU3eAswz˻)|F PܒɳyeUPdC| ]f\R^8g$Z{ʟju?ƭ)e|EgOɟ&q>D3fcP$L{r er_gr,O6o.QLM5wGYDbdʲ+[ǍYt^T`ӓO9 ثE*l~ֺcz 5UD)(~TVH/G1GR|(lvt3z\UsV_P/SC`WBll>\.XJjysϦw" $g 4G||Y;atzsӭŖr*Z̪@&k}gʰ!hQﯭ6+ks߬N١V=m-FHbOzr<J~s 5< ]k=cl$o8S Ay-t,H!ZNŢUXF2V(rԨxbOBM6!JF4tГ y\N'vJ`^VԬ5vj(c.07Ԣ,k8vs,oh6YGHpZr;AfxlvIڙCYg6!.Pmeu:pY1F: lp6HVTFZ{RchAT\ J\|Qd*AIljz'FWP6do>_n6nԿ8βa =.꫖\ߺ%Ѿ̉ }@uUy wnꭂ1 UU0&lP3P\i6Z৵R RIʳZ! .OPԸE7JƃmK.D΂繕Nj0N-<&]Y6\E)[h1|?/;뭽}PjJKg.=;nzU뇌Ouڔe &A}0A6xFM3ꄷٹS5|\.- ж&̞L9jq&Ó# ;]pO+>%b+Q,{zQ0n9;z 6Sk09o{ lIyF0r%{͏VR;R) % rQP&RVb#gb۝G%r ay%Uk!0焣32r" ¦n66o&x9|)J#"( -J)!RHDcݍj#nRkJ.;顏z?9hr!vAua>w>UDv1n[/~ Gz9_E)YxV?/xvYdEpR4!S)RF81JEǀc,زZ%iS֭$*Chm 4wH6`K~ 5 T!Swȹ_o@01vZw-) ,dYb(ݲ,Tq^V8-1ɻ}%5RiyV9 P>,HoM&h dBFw,T `SQ')$8A@HǠR8S+RNq@qНif MgqoT9A kF2D9vsRЅ,!|{c(c\~Qȣ? 
[[wy3= *f⪛`@ըi @%GMyi RI=WW`F]%r9>u*QPJ: u4FJb}(*QKUUV]=Iu9UW}R^h0Ay DhN1).lQnߣzifkf֙\<'*^=]t>ݵK*ֶJdFQ6 0e[ 4?frtrJƽ4$9+@DRrv* e0ƿ*?%R-޷Σ~+Q9\;ޕ4*z b b#_>V\1Mg?wxU9f}v3vFۼ-Z]V K1[Kɞ OkC5Gp$ru~ jJ%[@5cS9l 6\|}y4{9sNQQLe SNOL~oXa6`qgxae)ç P jNh^[  Lޗ3Tu(m , u@;&*F@gp|<}\L}ѓ0lss}LU%U"Vgaf36` _&Aɹny!ZMؔZ(ڜ-rm| ڤdCT*o7"XgxLnFܓ'r*B D -nKO`8 u`U"wa)QQjTWAJ>BU*Q4aE]1$0fJju( %憇D%az#ű: u`U"WCQWZJѴV]=XR}H ᨫDZxtTw$Օ$H ~@J &r>uw'*[ՓTW0%?D8c0QhPHՕ&RC2`MD.?tU?D![uͨ+ғc0&{WWL^_c3r]_c3jŞfTʆ+"c/NޒsR%WvqYvQ!Q T{z@ B,{wz9чEw8#DXۙyLO:(TB !(gUzy-ݗyP{}`bXӅ21`$VKQc`pI,:nrqwY&F:ZKoq%A0Σ(Q{u5m &eieG=t-#e K+ Pz'}^+cw-:?kyOע?uv)0BAFe| @ 6xNB`^~KdHDsrrm4D#CPnrzb,"{hߥgM:_SGWp`v&v/$3$3UѬ"uj5ٮ![VvͻަV1%3IQb%p“hOIH͙!HJ^i($Mip8MeD.4 |.9"0iuܨ-Mђ(zٲ0C(y2VJ %ee\B6J Ѕe\0Hh",(TM| Ce]j[X# @j j$x0s !.HVn$ÂrgQQi! NᐷjTF,zs\qPGqz "1G; w=}֕r#RNKq +Y@@jPnJ:OY.65-n|fj(/{pF UɌpėy\ N>~\ܥDNʏ^3![=Z4,s "D4 B$ RR :R^'T]*@=ƘFxb=zg:u RC9/  ےS\~~ĚgLT JY-3>^)焘O&X0Kͧ,Ѓ3?`A]JmRk˭Tҟ2 &4Ud ؅R[ eV~BOāqp'BOdO'O3XeHF按.&Ks7^XA-ђsׂ3e)cpJ&fZ>罥BehuSֹp/6!;-+\NlpOcEqi=`{i+!A(%4,$-Dމ'K]Roy UwŇ첸VRC go-,"P. hgZaBTg(d>hF#HNeȼNhGI %9-YR^=L0ϣR $N.eV!e j"Q={&zgKĻP'?n+tH. 4: Mm^ks\Zl>K/6G6D<9Gv{#5B4q[쎉q_xޕ JE3R^6Ĵ/]8>d̘IqKm &BaB is0VEؠb92^^.)̮O~c]ٽW6ڕ m_q=ϫ9Y}g(c&3eB))OyhGi?TL!RX˜Srp3^LnZ.Uan.~9ßz|Edf!DR!Xbs1.CGc1zm4ğ^iPh)mA'mp"JC\v2PMX Xh5njhNrḫ^UuRq|2d:)ITςDzzVV⹢N{9$d$2y8 f:dSncmdĥSAp+sՓ#!KH8AI!NxJQ),1*g3H eRnYJmU h1>8܉Ë*$;-~B1J`X?.`| o?7o_}W|oED Z'bqq+;#Lzo+_Uv e  cW:4o0-Bj'b~Fw& ,5!j?-Z]6_^%V'| T#pcD|;J|eO >1lr40Ǒ.C[2T+uv6̰=vMð]:܄6"ĭq;,{zuJ>ƝYq|Gmt>)ޠe5'eqP$M5XGR~]2o꼡.d9pZ=wͤjԚ-P6" B+1]0ҙNgǧ_\4PV|Iت;݀,XQSbMw|1Gh*;Y?|̙T#~ e\\ϋQpWMho(o#7&/&/}%[N(jM(L? Ҭ;Sſ)~u/l_ G^>;\ffk >VjPWaG3X[0>ӝ] g{4!D>GAKN"U@L~ *a-2)@؞z)+mhG|V3xp^x TU?6D%Or29n %u4qZTPڨH$`T䄢* zcfނ*bu7E9*V C&8P eiJ8)o"W|߽/ջޛ_%þʉ$B:d^ヶNzUaS?\>Wա:(=TϤR1fފf{ߔT+ۃd$umRKJQ^Wm8LHGi?!K?s ѣuդTܫ)I|*ފ^qZ94--/sjR}}_;ӿB{Dx>jP hR<&<2omҡ%^𠢷$8:8h [^Mg2adla47meKiw=n)ےHjDDlZqH3<6h$~xߛ7# `-aND)0>>c[pL~}T֗m3 ##E S_\$mO1|V&gx+&D{ BSa'Wj+DsX{i[k7D9ILN7%.BB#;*#nL" %Mz_5 "x\2FK5V:v%<<@nzJQَzyA~[ )OP^9oGGF>AVHo7f˯~Zx#Ocw 9OW\Nn?~n?pŞ~IRru xЪ+dJ 5Uuf U-eC~>Ynԑ匑բ'{o-Ovö[U+t;+e]1[!4u?n$/^JOHJ(C =j~6Cԣym '@0EHbbuVJLFT50rM=>U.k=ޗFbQWHfUmraUpy50_ع!TQ[ @@)ΛUB>Q&BɅnB!DB"WIf),*KH 5DpH Z`M P_K ||E0sPG)J6l@Gbd9~@a*UEz_V9.|pǧƎac62rmb_=_N*dy7S _b@E|\QHـE:ܠ18Fl,bO=xʣl|fUЬ*"(x$;p5QSѵ \Rq+D4U{b=G՛ yQ6?e ,xu؛_%lVp0^)ɷ|8vwtw㓹[7w;86"N2G%!*BZxC|UF>gf>Ix Q{ IaِV2)gmKk'=lh5*i0PKNJYWcg&asDL\\L,w_S4^-~7p{C{K<\+%XBqq A(R6< _ h EyGkL,o<0ap疋|HtVJ3ޔS2`U J8CC6.mnU% ]͗z^Scۚw4^<+~9?_4V<~a۝֑"*Ȃm;Y[$Nک[d{,Fd\3j|[aX }Dcl j !H`DZ{PxQNn 5?h+e0cL3QfevEXVzVw>J1X.:AŜB. ٓlU3&Hz46J3|hr\}/'4Հ')U;:J҅AEG]9Q*QgK^LGҨRxP>NS.A&HEG$V*d &bN;q,9wfd(,n<+ +zv^ћ,cw]rw{;ɫep7j~$8..߾N>R_2PZ-khPW*?E\ ^i R8[b\u8I)\K\–l:TdMdϫZƾ,#v>) o]|Ջ|![/:u/~?w5&6EM*,*%%JTp^Y$h%[)r3QP%H[9 f2p*F$c"d2oNl+v*/ڛJɺk%abNjy1BZǾv#Sujw{_OCmruUDLNJ {zuXPDi9HF9E3*+RtH""G_ %b5dd.RI_+fc6M<\ Ʃq/D{ ƨHQ0k)dYjLHYU,E@RuZ;(bZIx9sLNS XU\JIKmUo27vlm[úsMisV'"{GCOb,KkJL2Dg-! 
|v.>'33V a?>n|X># lbj4Uއz ȍx|H^\\ӭ2X5F4j!T(JB=hE*T +F#Cg!q}N.rB}q}]1JVd 6蘓B%U$ 6G(t`< b_`wMbk]_V}O+|6vl4PSRvV(+YVJa PcVzuQܸ_/9MrAg,JZNb$l4fŔvݓۍv̲ \[zzUkк$nf1BcVʍ6Bc8XwVpj:Xep2ځ 7++A+V;}w*:A\Y\C"Jfprwj:XĕCLC"`d3bV+V&+R)E)y"QŇsn5^$~X-'k9w-eXW}/[-?8˯Ԏ^V3bshL_lzE~vǏS}v_~vͶel_2-ۓ` \[j'oX%cԖ^ ^ʣj7W;5B9.vSԣwj7SÕWj[/Kgݶ<~5a#im:w7[WXߍlqq:4<~Lp*tEm{|Rh1"`#l{3qO'BuRǚB@wjh.?Ck׮{:Krݟiߧ{(vqcnܘs۹ֆwv ihޒ` {rh޲ZSҪnoOޒsZ5+wbOWҘĕ2^ XU: Hnb:NWJq2XpCb[1V W+Z)H\\-[ɻ+Vi;NWo[$1  X4STqu֣iif  Xq*;NWIh]`)e3b\ZTS4W+ﴑ!\ W$ׂoWXvSi3ጝvwk<ԾZ{$\M6Wj[\E`Wʟ95(s*Λ!{~yq^1z/aԺ&~xߛ7;ߜ}( Po7Q0P@A'ᇈU"o6)-*"k %]TD8Ay}LKi{}SwݶEH1Rk-rJIs2`Rܿ8lPNr `+*;u*V([Xd!p%Z5+k\Z7T*fv\WkmC"O^vD\)T+bৎ+Vtp1| 5++u+bLWrctp+4 _iW,WVpj:X) Iv˵͌]ڧ^Ux\Jo:NWNhcZ"jg9Vq*:A\yapEeҵ+V'+V x&Nnr+XqpJ"ִp; ;[Sݠf _!LTZVw-MἽ)nЂ[fq/@ճ'N (5vvijs͉oв}3'$؃h޲\ [V{eaBH X4kTN- R`5 fpr=ױZ=yw*:A\i)@CbX9Xխz=u\qI>qB)l]`,mWM>$^twu2DCb`b3cWN>UNmTApe\iXfƮHjb)⊚V-,عfpr}3"rb "R+u]Z年+Rz62[zs&Na?c&ynj53J+&AWj[oM]Ol Ri=#v9oAnxlfyb}aM[wt7S .=@97{a,\+ժ[dVM'h(: >|nrmWLWRq4w6+wriWTj!;NWZ[3qvӻɵz5u\JnAp(6Mo]\\ZSDqu2l]`˵͌]Z-&?*v\ ,ZPXpCJP*3u\JcW+gu!\`w*@uzbSĕG/ Z.>|s#\Ї,ETFE0hl~,KZ4@{- \I۬wHZ{^kT+mXÙѐh| q֏7G1x(aYQn'O;e_RO7G\_ |.JqyVRyHq4GA~1M^r%@e^2~~\i'?.M4 In)Npmaq?K9*eOZԷˣ:JrJ#q?B]q|֟# /{bP2FӓX&"2/3yr|]2GEώ_s4Ue[׵K/M+'DjOܧm Y%ogpekegL5cւ z"jY2raY]鎮n=OnYz$TƳ(%d":ͦEO.[QƜ wtA(#Vkd{t#wmn5-U---Դnu4cn3p-m ]Z~ 3䤣GHW\KH s NOҘ!] mTNB¶֫Ai ]=B*զ֬= ]!Z4GHWHBe U-thMADic+m m #`ٞ5-th塎ҫ!]a H6?\ D]=FV0&g~7zpukbWV0tB-:j;]`F @1XK o]VPݶB ?NQ9옞^4l6MUkΦjTu8JQ{}kfDWzhՁvR7,ijȦ3o;`([DWXU-thyR|?Е65tp)iuhY+DUGW0rIޕV5tpdm+Dke Pκzte XVP 0g$Aܴ֘CmGWʶ)∆}•qnA@ihGW´ɺmt(ҕREt+{ÅU+Dٴ\;=]QSrL$?ՁpƼ&ZsgJְ5NWeَn5ѫ8=cbΓ/Y|r M?J!./]\I:Ӊ?a6m7R۷CS~ŊvFx .yz_o [-+N=3ZoHdivpYYpiswCpy_5-zԟ.}s7o@eʬ_GCd*u.K1YkMN__ϲUMK(Z\DU% > _-n%. 0ߋ?^gqyIjyQ(4jIL6!?bDlhdK$d6,T|.+UΉp>ˊK\ֻnb+TLz5'Rx~֓b ^Xz''fgL4zk TDrf()g %Yu` \ۻ1n?yQ/krT=2ϥaRkb<3֋g2eT!2i5e* 0=6h()ɵ`[u,L8 .^^ulN%k&B`9 9W3"Z m^PDnhpA#:2Z(v6\]}, }`)뚲"x]Nuெè&g)e:ÖPԪs2 ;iiEbd\Q3x̉h^(0)y؟^FonjPZ h2~6aM6yQO8wٻ_^ݳ_!x߽I[7UDg^V ,[L}X|3x(og ËďMN$qAt("1$nx`r!?b3*1_[g@|qneh6)b ,mQlz:jKl|][wwaxc9"&o?G׮*ŭy7?|]W.zꝠR%07z)bYԸ :weϷ0~qށ;)6W0*yWfaDCyMZ.u2E#xVKX4)!4eyj1U2uLwYok ^mJܻLd44\+b&L&lfFq%1rHJ{4w5qL؛L9!V q\R\:o7RnRb(L EPtN_ ]Vo@olzKpC7(B'3l")fчhbCSLK=X*IkYTK~N ,e?g'Parh6}J+ZM{gIlzGZCвfqZP[:&E&ݦ<|?ĤLhO!2UP[@7 FBxN烼F.m|oN6hI}9,`VKzc2dK|2Eyy)-*52lViz'5vһŸ-WQhSM3Yf2/X ;8 =j0b"7DimP݂ |;՝xwyA< 1ڙ;^cRY3wM}ܥj!t!rڛ3X 4s|J=Y0H9Ϣ 3k"&C5*XQpQuk,n:K./Xh6|  Eɧ8LPO0cXxI4trseOrЗcpieFwTݣowpۖ<p:Dd}j.dl=P F9i+? 
W6"NOER|[,K@mq7LϾTڹq)fd&Nx xe^ޗ]uΫ͵Y OǙ)8/Zޫr8 E959i1,QjQ̢MmDW]]J#M04FB?Lz_&W{`M@"a"۔esC[q~3 rOd$SklYOňdEA-3&=Ffb-tځ%uE.Vz,,7lvTkƤW܁R垸=kܬGimsW=^٧(~+JF :G[^9\r/a-s?GuXv_UXּk4匮@^7ĤX%bRnݟSqkwٓWRW6_Yb7_}fl[C+z,8d+McRLebz9[}Fsʁ[p wmbrC?x:_+8ZAqghr7s6$Dї#`{9b֒ybV2ob(`C^|ty Zu&: r-%{`"o ZK5ܬkuTVLˇј=A⌐ ]CXPJҺ:#6VX \)[Vw==뜦s\ϞYyS5zO,GgOw*o_[]__oD}jj-ڽzKjb*[/5q!vU[N +̿\]na#+|;,Τ~M|o"xLq-;3nx͕O0R*~^׿=_^r‡FΑ{>Gsm{;Oo˳۹Ma1js5.fΑA4sr#Jzʈ]_/^SIw,OVw2$ou嵻0a5?pŏwPߧ OhhN~ݲၹ;;aY8y,Sğo<4L9%ވ9O9H!7Đن)x3J|%pl(6ySvW%Oբϑݳȟ`^A^qvrZͥ _l1&j= 1AT@9dɰ'ӐZDS&͛X !&1+\]Z/:jH3յ<.b`;}yZjS 3RkPFZ]܌hu[߫M=S$j-V}>Ց۷AK3뷺 &[LPf)5czIKL̸ݹ`K뽎VOdP1Mc/9%J} G<™iɣWI\1;cFM`xgOÛ\w͹Va "1| 4hDT{W%vI$~gF^ 0ȨG鉠6.߽IݽdXWJ{ V13P2AsXB9θH!I-x\5*VL˝օLC*)Vm}TChsr :b:gT6BZ3qAcO6X?5/_W2Uڂ1$g Hk=u/5k)y f,^dBP ԑv,$KGvvidI|,P׌U H9Xb@ [^XTl`t[DEUp:vuơ(/S@D)8,!:_\ǐgoK'ǚhzS1f,qGY`\ZB44&ocU5p7-0@<h˿jaARٰ֞ *X+2vdZLDZU#(JY3 P*3'lU ੖X%C dƞU4صYp3w+޷X@gVBnU{VrE`70cºoYv3:41  S Ä1ȿCy`LFRJ J vƤT$Dn, ֑|60J  ڛT gfc(s$e5jp#KF^ߑ 628;u2RUbո qm+5qri`hƷk L)H WU@By7Z,] 4ta6PZvmM4 ID:Sp<|_z3TnjYbLB)!&D_}0v8î,o|`;wG-x$Z|aNwvZ$| xK -K8`8M:f%HY@J kRe1G]@NucY h~A4I"LJȼ ēfGaFƒSq`4M@,I0|%PV{e <o QzX:dAX?3 kc pn۠&k)] I |׍u>n7oծ?ŝ"Ofb2,S 4BX8;K+)G^b΁6D2RB -PF}]J0.B>`,9;5Kp-QR%K2fp @P-H/+.R34HF9!0X-Vz&p;`Ș9͘cn,L ȚbDi%&P?zP*Z+;M"BõBy8JT&P`uլ)bl''J I'`=`3SGIF; h2+ioPJR<ުi"dߪ砱JXv!?/ގ,A0ĵU F&֍£gWg0\,~eL{qrvڮ17 e؏}΂!n,̃'K:8%'NdF]UsðZkh]/ Dd2Gwfd>'#E;vэ)}4޸2 ֫cT:8uc ĩV N!+g@ BXX2 N F:@8N՞36VXTM5UnT0_֒ c6`Ad`Og' \- 5Rƙm 4VW<<"40P5'W[F违V1ᑡ@͛gw2D{v+bVLK8K)%Xy9=`9=`-gS Vꃶ{Jݫjzjzjzjzjzjzjzjzjzjzjzjl͵>{RWKdoXqQo3wxEm HV [ ܮw^}Rqxʏ=y}`5"?,oe#۔eO%$7C!'ֻ"Cޱc^ǎ8@9|'0sOs)9~ nA~F^3(Ž/a-UJE u'kj;=={OO.ڪB:1hs:\O1TX o`lR[x@.>1Ps

9M,w>3>W#jQUN s aV\oSݮJg>O>cN'r=;(tEVmH}l-jxC4ITdq"2N382*Ӱpg< e2ʆ_ -v`ƯrK=9c/>\7냿2d#6T/TαsF/xΒf*֛;d*c arƔb0^w6t˨=*.VvFcwK9ޒySQ['n1؇DOE8ㆈ">.ћd >bZ߻*$[RskVGAxܧ"jˉ8͜[Up=i^D\d;l⃳<*&;Xa&F*4PF'by"K0$.~ \|6k?}~n#wv9lX {`T_"yţWX텾FkMp[oqtyA}ap_Jtg٘}XA[[ޟ...:GJmUB * uLط]Kk9-0-/PrpI}_f63߷-n߶>},m-;_@ɖѽr-_h)ٻF#W>$w_ȇK8$F|Ib*%R_pHQ5fe{bOz׎z!DETo5D@@k=.4]J>܍!!QXTţaD͍hQeb&^LiYQ)ZKyY(qV8+,e&nMܚ87jxewlؔ`PwF]o>V^oׇt8&c`BmOkx1+^X kxa /5^X kxa /5^X kxa /5^X kxa /5^X kxa /5^X kxa /u5Xb+ WްX0k<+"dD zb#:|]rXo&.+pXUU&Pywp+vsX&fx`':TOUl7YBĂAFe./9XF nC>D 6x^R9ɑ"ۈ~WoM#*4w[xn(!֙8~bd,4$,X iGLJwE9vUiIe1g3YQb%6d;+'!y˜|x ^eT'X{dJkINSae9Zٖ@]lWy6Hi5b/ vo n U-uH1TEx%Tu.TjX1⳷~O}Z&w'cam'ΐotۆA/Yڇː6:4+_r +%ZrrfRJN1[;"t5q ]B[{fl:څچ򣉫kϾƩׇt>GjU=X@ٛc@)||m'h!.^\q3w]zhW  лuղuN:]90oZVвjV{z~h|Y9獖|:}QX`m^g{R-W&x294W_5㱽[hS=n,O8ˮ2W+J_lyf{i4gҞ{8N/ {h^dsCRw4wӺo>l!qEwnS/?!RNʰ?lii$NJ긧$pX >nowSjvÇ?V!]o}@G 9ff2 41INeT3Q:grN,S)d6[L&Αwm}ZJ] QBҎhB`Ijj} I<i_}?ݴ|P#LKRSv)9*s%UOiή%},9jWe-7*wP )1a"tJs + xMHQH%,H۪bhEК8G`gCyeJ`ɇ#"j-wKT•~x&tG#oxpI譐4R p/E䐌j#VpxQ^<]qΦQ6 pp G<JpEJ1,0kIesSR6Pl0Oy3N鯃oYIjC])4uͬCVS{Qc {ٍnOjƮ;-ab:;\:y&ve=aM?Y: }륭/Q-}%V^e@Ƶ<$?n2vGZ+ԠkV/( ug&ilr4ȥ:#q>.N^;Q6"D ̜ị܄c3c4%G0,uus`!$xNiq[8K+E2췗ҵX Wx^8>'"&xVm I8ML %%ShZXT$6EJSޠkEuHwjƣP69: fz?47S(BIMѮ]y]e܏}~( x" RTf="U,)iH+.*RZ$U@REt4=RWE`z7 H$ TWREMUXި"}QW .RrՕTJ#u+g+M_Ut޺)5%.P]i ڊH]J,i2H) TWFd]MUƺi5uUDuu&XBE7m+W2uUH1+z+bI4qz_uuBIiD=A]QTWgOezcnG1Ki,$u>]pX׻IL>8`:IjE2VTMu{3߽dfuF=d 7)|t[}óXPWf7RN(F_'( h`qT#$5]笶~?|g$u1rQh%RUA[.g*DΗL[o=+m&WRJ6$BDpa5m$n81:{ra:7w6aW"?H/ ˟̧˦fP4ȊRo@?p{%(Hb}xqSK6soނޘE׾=hϼ-ruH)мDYU"7 t]]RTWUL+-̕U&xXH)ѺDu%JH]ި".}QWE".P])B uUqE]i캺*RZՕ&R#u F]qYoUVu>^¢ueHAH]LG]qUonw]]jTW,%L+Җ* uJ<"›oG]#]߂a␺:I`ep &e`uP]=mTIBQg|’ٸ! ݻmm4x_eX%8RTdIr,^#-^x8ՏTUFz!rV~ {ƼIhȕV^z Y2:,_6AXE- 73F(mnhri lh לwXFl&I%' a$UZJRB%_>sfUx@Pqn*-RJ4\RW̙eAU9CO ̹aAצ&~_4֛2~,'С~{TQTl6~s!ޭk|Tx>b4.0ׅ6}{ioX_w5N~mɧϐot{:)˶Ÿ^hg1Iq ̈́i&YSByi D A[-T47ph!HSACgu9W2 jE A"Q2-iMTsB*֬ӳj>pn-ZbOϼN5kڰv*3W)h2$SJDQp 3WRYk`Rkmui^5V7/fg[KuUcSj߂b':woN\iNR޹$$o)Զ:C'0|4^ ׷8N&!re1DV>Wy%02iNٶَ lj=+`v(⑵LRk*J tr/J\dڪI;FL!kZCqc( Dh,PG3I  ڨ<QJcb߲ښ8Ч KѤ _a'W3. 8dFpYK;?\ڝܳeW<#:s 7H۶MpOo-G)KAV(0UdOß#48:J4^1j c}e)z֓FwQ- [ ҃N`@_E^!;[4 7v7l&% wј\$ʂ{!**9wR)Уy[ټ84hja}fMhepyd>rط߼K ܼĹ;}Z|ϧ_njquOq&?{WH01_XnfE$Y,dJ)$[~iɖ<ۋWMdêbU|vu;BXO&͓ܣ n/??O?o.bVi?l>E{ZM;jg`yT#?X|^ʿF>]Frv([-Uf;jZiDhinYAYpUҼBˊMuOsu+#BY:,ùG-}qǢ.EP><OX9l*{BĤJ@?Qk2Zp:Β " #bIFT21(R"0$IO';蚑<}HDo'Mw{ *)I1N`2854kmqn6FȈDc Rh Wڲ9gK$ϺNn#Ԋ݂dz>4RuMYu^<7l*z0"|rپ;Op/zw|tJ*Ya{wErlGOc:_o%)*>cXHm3^mM(t@u2[, Q@"j.x7$j B!-C搃.,D<')bjٍtfX،3B /ձ-.fxaJL tCe1qZX"yZ pVN+#Ǜz+.gRF$ X)CVgF޹ hfRX?$w"V-dG š*CzRڲ;M u@d#}W^5?_9] [Rk%3UiJF.JYfm`j>ivQLJUtc':p|g bzvOxD>#9Mgq6?¿-c5ሇήj(4 ߄TkkwR wW?O~ZL/.j?}pvqTַM~'濟lq2?=z)^)-Qeu9zݣG F`wO?ѝymY\{9%OKo8nlpA^?5[[qגeAB8-Hpbc 1_pm2B [bnU60/(^zwI?tSMu!D1#zc?4ɿMd}iuu[n2ez .t1+L̜ո&aeZnn [(Z;FfAϓw3+uS%vtسA»o-}{ɷ~O.Fx?NgW"XJ0Ï5r#W/8?~Qk^xr׵G57.l\\9IY!"޲w,=r}%!FhLƬsS)[h. 
P!BAR )I[BVK H3r* '0eQ$&d)A"DH%feq3 gtIQDR2Djs⫅H)gMŔbR2 I*h S4K2DWl5MԿܱ 9v;_1Ⱥdt 뀂:vY.g)[ `HņxYNކM"x-&+jPSMF[++u,M|d#C2X'8M3Ś[B*34uك>kNYנsw7Qy%26ѣեjX̬d&N qaTs^6 3o|0ty|PBBQK"D6P[QFR"URizD@tT!K2R!AH=IME%j)VTks3r|^|@߾^bz:xA]z-zzeEjj~|!r+iە"LdOm/sV[w k=lޫڶsn76ziܢ~獖a6notw?C(Wwt\?N7Jp͍Qo9غfM{6\vj)rbJߴFlsg//\ˊ,\o(aVds v6GL;f>@b~ Q·c`6ϧyoGx:yx:hRL/jKMDK,WZz8\4dK պG_aCb{J6h i-}c,ۗeYlu`Yn>)QxbJ♡Q9T),@7z7=v%+Pyo5e&{A0t(,xБ:mxT/@Sq=c*:d@BHAd_U 8VŅ$%h^Փ$# HII$a)" <%THXێVF1E!w쵳Ųi46zz ǢtLiA D#+k_Yg2̫kknHl/R㕓c:*\%)R!)AR-Q$E‚`ntw 8ϲ!Ƭ7(ogwuFߜvߝ~xUϧ] A'W/~<,uUů_"yo\M3X(gٯ_^ ޸<2 .|UEV"gf3?}$+ '{UըFk| è; JfdUAweWEדɼ.qgwݕ.Ksu&/z I ~ʟU%stj1ñBD*>ԣH?>H_2;W 7?^O8_?`W_odqvR" _u R;!~YـOȹ ?v09RΖ*XWkZgư H/޿ңD.-=mjeH 4Ah1>$.Zϐ֜YNǂP0Ӕp03퓷/Ay-t,: "eu*V J9Ӄ2Vj?(m^յOۑ7u>+}gs.Umnb`elkG}Ϝ`{n<-\)Am17SYGH@5 2wJÃ&:^}#!r6wB6lJa^iGjdEgHyh Jx-uPe~YP(0wulmj,k^}!mȒ:>_A6vi7 ߇B8̲ŧa 9՗%aˉ׀>\S\QL%9Ӗҫ6D?&sE>kf9U$=BzVڪT;RT Q)m@;L؈E_o@yRi.9 V";8D[i;!\oK>#ZY{)k<YM#c?/SQ #z!Uxd!R&Rry1t\+C*E4UeT}7mLS:R{MG5DD&p TQ lhag o3o$ g=%zOr[ mkȄR3q$y99*hL>ـB-U:g4\+MiYI.[J{<ػWj^H{e@qBi q(  <<MFCt4D{S(sVA` MgqoA9A 7B18JO)㾻ב.̍VoǍ}iTYf) חee_ķET_畘V_ݍJ֌n-".u5s-c1sQo@͍y,![nu '1beh~'a K R&&V B($<3`MLj9+j$ mX3a)}A)F/<Ktdj t?_/մ;#÷ގT=| "?t .㐖QR!Vʔ[=M&d3lFX#B oZ9(` qxTVJaJ6R $t ?59Ѿz<.O#Q.'% l=#.*# g4rJz$ w G>Q |J`+{ uzA.:59>x W^uHY@~ z9:CX&_zpSo_V:Cg,VHK^2b{Z?OQtJJ .UzQ!]+j-o[O?I姩 Kvqu2MVSL 5F?<3 ƥ@U2!h N-1+q7f7Q j4ryUIOD/iJẘ޺^k4 XB2fy43-PTX5Z[Ek-?mZ{a-2&ƌ 7%v"D]^i <8,=Ñ`%TQkyk;M8>U v1=, ]3x2"_fՓ..,"8 S)ROr(qrvcdioƖmt;yc@!6ʌ ; FRy $;A , WzwS,[}uv}6EYIm]:vcLK G&98Z&Pr:ž#nD#\F"Z!]\%WQ\ nJ%nJp#MW ό3sWdU qjq%)E컸J(l3Wr-HĕqqqZ퐾UBIi+TI$c01*]%jA@ow#IOC\ ;W݊В}\ %3튮 h+MzMד^ gc3vYqfV=ahGfl8Jп̭M0ɿxM p>#>Ո)0tjQy9 hS6ػ_9)T3_r_?Z\+. &uD9hkcN&VgL#u(W3E͟/\ea W;\[V&z!ᴃ8gxqNQu k2*ӏ7fTK 3jzr6zۄRV}-a\&Y W n5N@hUBU+LFiWSqpMWG/%AvTQ q+qjqh5V.J[q &+Sؔx VBKy/%1ŕ<t VtSex#IJZq ŕL'xƯc & Z]\%+%ރ35p)o̗A@ދ+@ujJ -x `IcUK]% ﻸJ(yj~{ !+;׮V+v&ZcܷB!lqZqhk"~ jk lNpB9XY1WϷEܧ`k!S<`s-:2Ȗ[9>lqDxg-P5QsPY"M(J\/c bYO.rg8>Z \4SEy#V@C a'F:d-r.h!!ܡ!֜3JZzR B'5FOp%nbЪWJ-[*Da.IX6F\%5E\%\컸J(xWG\Q$ WXM\"ZI]\%+&U3۪?2qbKY>H/jup1 ~䳱tgQ(SFصcC .g\t׹İsfaFG;XcR}v0Oopx7?j%."8У:]9!9xȰ]ߗ dHm"3yi&pǤr:J} mr(B bYpD Jp#bڀ,q(SY«TǃqQzF3T51$ABr 3/EeC+s16OssjTTo fQm6ٻڄUƬFm="R+U~_ {l,ztZ6);+oafVY[0 ٺVsp0TׄR5lՒ7k>ߣ2Ʉ=>\[+|-kY v{UAgӭ_=U{e6Pi$S<}2%0EPf$`mV|؛HP* t>p!w1|u;"E RrJ$s2JaQi:KX : )VKV]C㞇A(ͺ9]s!cD 9TXUX YgNd+[ ۟jkzB"Y*{"2#$2ڨcju F8F5 i 70S~RBYmNR=mI$q>e3VR{J)4` Ἰ+߷!AgG[Nd}>0fAy;s0~xE/~> '?鞾zCa8C_Gg/)ϧoՄM=rz.ߜ /Œ3gK,|f>Gv.WnT"V?A]< L/(£$9 H+[*Z\TNu;+O@ݕKsu&jàt!~;]PU[Y;_>t: ]v: wR2@( ˆQ)yWqXH  "FޅQu1>L=%+A'1}`&Ib'o\'TA(G&`>$X']'rlx\ޅ-j*a4Xt_XLñՋ 9\^_ʩ@`0^-&{Pj&r%FfYU[Z"x#)z|@Ϗn?t1Onz9>?{WnQ9|@nKw9޸qTr^%ٖcVd>N.9$g<Ùa:?+g_Yq|{7.aq!w^nwo^.0rW ~{1˝gNnw:w:;բwe9XU[k>7vvX>?z}ۄMns/7[R4}D Z'T¹耘s * vB372-DОq^|íiӌr}=~>_\˽jSs/pǂA2͐RqCaXssH7ƪB2*f#մeAi?rL99[NCo8~6=fYiߤƊYMFk5lTc;h5߼>ІE:_՝H 'A rf6{{WWrHWׯemϾ iOz듿| :R`M~J8p E{ͧgfNCK2{Ln/[:~ۗ^\,EA{RY吩8hW5NEbE`P/&[=*IG!RzbƘf4G;d9\tkLwi5yot{mr7'pVaFn,!|8d)H\"*d _W 8a7: I#w%1Po{ox>yE'ɥ\'vNUۋ٧lɾ('ٟ~Z-lxjv|Օ@OL2.NoמX:Z rV/bqktK r6Ӽj9:j2.^S hCy4xg <'Cp|MsmWhˋLz|U[MޚHVj ©"vж^'eB,ʓ-ZKiCijl(6ci;hW~t㯡d}q\02 TN-7h&tI&xkR2[CdKqݒ<tE=Nb6@;Qg0aP1%Xl:1-s`\qOz 8~wΏYnMp ?Agz>>^X.;Eo8Nad`hLZ~E0@$*O88j]ʎA-v$AZ2'|vїC9C)T`Ygc׸v DUjW["Ȁ1F: tA;ǵwaߜXᖅQu;5?Uz?-߈㭽߳(z< b>Čc.bUDu79|~`1ZC7X<D-$b5#O?'hcO? 
\j 栝Ɣ1yRu%"3A(f؄ϣ<8=B^"ZA./~[ f==5뛿Z*{2j^wub |J1Vh쑡j v`Bp-6 $66EFGID!HovMRg &gz<.뮺^~M񾸔b(|&f_Lu7lݤE=dk2)Lƨ7["aUueJi eŰ0A`XB<^Gxő{<^qD8 W#6BG #L@̬]e']ԕ/cM H !!0T_!kJ\ldR'<:xS;n cb0|]Ox{q~?W[^-^+Oκ~|7Ϻo+3^kPhi +\Hs^ dBrlm08X1 [g zBI؈.;>yHݣ?Tps̆ `٬[jŏ=C F# P_vb=,ya.{5FY\0y؛p?O%Z$щ@Y@QW "YG"(9r(2-<.mM_|58m|jE"Nb2죐*Ye" .DTǬ{-^*Y`D)Z!D¤ &6]8{ a,vq1kLn@蠯]tj]׻CS#6{1E|""m8"MJc"?a_.l ^ڳ\儶eJ<%xo- C6EG9 LֈeOպ!$l@)Nqljv#GT׍odotՆ&Kds!ʐ0ڪHc >cf\QOYYxxoϖ{Z]^]_'o~&|:o^W L%!V(8LJe0<.:J(רâEuQ̛EIVb$$׳bHUۜegۏLqy<:qC!4D4dϠ(91fuڍ^g}QMAWҔ3`N3TR LXL:٨Z4ξoNR \\2hMyB*5bSX%b&5@sԡ9#bC*sQQPSKEᒪ+RFKʃj90ŗ f18bsKfYElk0"H' $(bvL( }HL͹6!е!]hyqMgrj@o\]':Q,-fRVt3݀v\n@j,ne)܀-6D~(-k2!rA ɵϵqmM'8RDUc83PT>Xa rbH)|6Y%DLhRXq)K+I{&*vI sysAҶy[GgLqYWd;iRL>JLdjX5)M)@Ej)"%_=ko#Gre ~ X9g$3q>bHTgH8ڝ$a9͚zwW0R2ͿV̿$D6O.ٞ0590v't_Hn3T7g܉s\Gk֜i y:+-nx>둸j!(,ּK\F.^NzkJn(_4 QfZ0K:!zvSB)8%< ia`=76p*)cP IQO9?L!rWjn(vu8ՔܴU)ғE:G8D! N#ْԆ \zξt1Aio)$L3^(Ģg%mF%e~WY(r;Z]s!8:hHY1:pJ)~'|h^O,=X¯̾-t|S̆|TxGU0r yґOS0ίnݹ߬up^+`Ik%=tJ?-dIkzk6w&VSׂfȍqCAb=aVȬ}A ʦ+4o̧TbPl*U`G0˕ +u'] _dﶬPގّq@tTk7/3m`2~Yq_xd̕&b߲G glA&uVPvK=shOvCkNrF@84|62tH4I-S$*)<1F)Fn夏^u춍n۴t±W x]' QFS*"q8Q# `-G!%zt/Zz},gq~;JhrBmqxq˪.N,ei*˸_EF^L*g~< y⇌l}LTmzY٢P~g;6RpƛsVD#nl硫 ?YRŶI|lE/*$Y~KEtgʾ2߫ҳEbwu*~P%PsD)袭$!>{Cw3n7](O|E^.w+>[Z*J6n5>24r8j^lTT;hw?H ^c_O9/t́=[1"qeHjU~-*c:/2Wbϭtq4:zʜV\= [cN#%ĕ[tmF77Q&F57"S*M*~ӠR"ST%II<v}bCYX@ jCP*!e 1)bK 1![ @'Wצz"^&|`'X+]-GuH?q^?+V߾bE1vx0٣8r;)iESE,T8ǿ4UTtU JUqV΋waDX9;DEZ ^Nŕ.P9?͛?(rNWPK?Fw+\P9xkvS:)xC`%u֎=֋|:a]^߯T%Ľn) /狡w.'L%e%2Y .UP`Ma^o=zo ʺg,oTN}<`8e1 -uLt2R屏Y9O"@w<7 $=sk$, UnЋָ3*P(9F RіoT6iN5ap51ziMRګ4kT+4ɏIR"2$%F07G2:˜P LdR2 -Z"5)jcB;iYm\D"ftA!ZٳCK¿rNq^hc)M{ ;ޖy#%.e4 "ZEMUQ~&j#=2pj:/66pMN'JVYlAZC ҇&,! KEmbg9뒐D`<@r iF=Ãp,#=YcxN]B!czBʀ֣$0 A!x52c(ͿV̿v8//ڊ񽞓',X\v,O辐gl !ڿlJs1NFҬG⪅\P‚7dZ,q%ux ;9(O8{op2 q:x4Jq=#={^;}IM⇇Y!g^ (~HNj zo G)@OVZdxH©I@e$gSGF?10vmܗB]*s46Ni납~=Jm4+z\ ϩL-@|0DXRj2elz>Xr-ǮzC35G?ebi ^1hhG̋l}B рԽWv 0儉儉.i:ؐ M]N;0@ QsS@#׆9+ :*Fi&ru.Gr#e`T<j܍S9YYkyV]U=ͧ'&_n6e颪ٵ =B<VV\UogtjvZ zk?Q]O[tZ:ևOJ?EaON/AᵸռK1m6`ZkRsn1_<-u宆e-yˑVcFJsbnP<挔u!4u}$rڷ:J7< C"gӲF1[.$91߷*?=Hкm@օυ츧SZQ[ wLR{rezbmteRai6iOe} 杲K Nr&Ī@G;rSS'nh:$^SG0y N:xF fgՎn"xϢmmSh˩Wx;E1O[DH L(!sƊ܂(TLSzE^N1L./%;q12;Aى>zA\j_s7Nv蔱RQˋ(=0:\XtCd%uWI}kM"]gR:MQ4ٕN[K@q8C;ZC\Ǩ('efO$3reC¾i3my6wkeν L%B܄s2Ք$-+^:F+NKᵯfz1iX L+`ZKr" N$E6kLCVހ2Knp/<6FphvUV FLhd6E 8=m*4CĈ2)7_DPڤu.8'u}m a^_¨_|{WǿW{Spo߾y^D 8Z7⪚2-go?&6=w^~|5MeF`t~f=78ׄ{59^h*Q5?/Q>b/Tey뗦固iE,u^3wda0ɯcDAfKPzSQSVAM\%TP*2}n6SpK,hOyk"9iF@`TΛA.0`a-.rb WAYhzaxykME0M]PGA|7rV}7sl00X_2^48)~hQ\5y-Ue,~8/=g3ZbDQ$~i#SA'TVAwUy:74m:59=EM+-DYyM\E3"ui7 W'*Hѽ1jQ]+S,K@=qy:,WJJ#:/˅XJ:Jy:e(h \>X-Sdg2PFr"p+x]h*z%x&y] քݸ /G3R^^Z/S79L^꨼櫪H/D B$ 120 HV^z[%&Gk0B̗~V.=eJhi .CtR+eʩ߅QD@NtKZywrǵz_>6dX|Z/{k-X3}"ϔ EdqzBr$y_}~%pNY `zz; 5!;u&&uӖloMZSNyb`ʨ V)*% "!Nq4[lO.%]A$I⛒6 HJ Sk yJJ',F[ % O)2/R`<=[ZW#{fφ>o [7& ALn\8EӖ };}5 O5*'U<756I(#خ  h&ktj$BX(hb^Sڕ-ب'&DDsho/9Uwf_5Bˑ^!iQ6)n |㕽a h,X,ʧbDrbu P: gdB֌ɠCJ\%x%x4NҺkV:0>EQWtyR^>NVUpp!&rUV"XB7VE;rG.sR iȅ4{B:TB2䄲Q%M%h04Ex`$E+XVElPDGE* !:kqQ8!GWM9ᔕ@T<0D r ,?a\KCpv141.J?:q򾃱%`.~tx&U϶@QI6TRLJdI"US 4*F"C^ >zʃ{V)5Q4"4#R OJD%h(*e`:GCYd=OLVg *{ي>Y\n57|s؋o?%~nDy(8ޗ\3$yiNء'BGF)cv&pD$GZU/[#@)ՊQmL3 .O6sDO1 ha|TR jחGȹN ,w[P4ZSs~pU>Pn6egyp7lL3fgEA w`18pM3hxTTĹJu섷 qhNj_lrZt YBK^ |7Ok=Ajx'0>FB%MJ8*buDsnLˌ4٧(د.[|ScӜkخK E?G,y!-N~F3ZD=(TS# 43tUS]}jѼgMINBF:cfH'8|.5g LifT B]- YB\Lz/(nnP u&nnjlrHmjkjzGøD1⵫mM{{ۈ]b6@L8NF]Rv`-E'dpZah] ^UvyEnSI%0AQ+GcȂ,2Y-gXm|lmeh{{[ʸRӰ7DFcLZHFR.D1%#k{jN$Rq 95Ms5g|@RIq- -JZ;6Dړ_43R(/"~FAK'!b!i#q Bk*pqFkP!;]ά]|; [qr޶sh^,ﳚoF1]'k?Pma9ZxtJ SF@h B"d0&$uˀv(z%\_gzGqu_ԝK쐺/vC$%E2hF0lf2D,@yyJRL/ʈ@̗^  goN"1sle'Ͽmʠ^`O+z!cHK?Ý*ð`uM ~4"aɷeLT@ccE[ؑmYvcjzVaZ <9Y} 
9th sQ0TH&"r)@ruØAHɥą`Q qKI$j>js,N֜ ô oڡRqiIhhJލêbWߢٶ"I!wIJAY+^ew4,rjNSL@C, &DANx (N!kZZu"FVG4͗:XLuAh(sABu= Xh±d:|Ybk'RVhƚ>kG whX(+ZF[5޴/y]I6%תr B~_SFIyӞG2sfd!'>\6'Cf$ѩ"> !mbϘsnm@^#z`#=cRpE-̰3曑jGqn[e=IYJGh}5u<8NOqˋG?|zOxG m@B>d+4.;/^l31WtLBY8ύmWN ع0%G.[gҖ7z #.бp4igINr D3=T7Q]۸k}͗xY/,_:]"!ẾR=3Xi\Y}Ői;Hň0\J2أUo5#V @ZS٭lS?>D(-QYK'4F'Z|͊wiꧧrY߹֌ ~jo~0_.f8ɵ;õi,/;*+Uu%٩#.).g3C~Y.L|)m/d+!u`@b[\cV6wcN^z>u)1m캬V dp<BƵ1`77Fv8oYJ~=z$;6ZcbmBF;]U|R'^y蒔{p$ߣJ?M\#fW)VAnQ?D DT @s>>;5 DvT.87s u~|j+'(ѨT)b5sMVbB%=xڑX}J-slJ+:/bR.~y83RC45Vbt3[읐Pk]WH8x_6qKL@B)^`YiEGNӜ̪C3Ơ6Tl@`mmuePʁ⪏8%IAbR"<$t˸Bi2>T|PWA0!bfͥ] VϺmm0PQBŪC8qM(jk-%UUjYo7/ǣy^8ճRnӊyE,YZ~J늛t#f8CQBJ/MI6[5@cCY*B2SQȎ݊$Bv /t #'JB6$9FIIsKmW섍ܘ`P't98A>"k^H0zjʁSKQ\VkS%ȩ } ۉc7i2yW/;mckXu; `.C ǫWU/@hVφy &XgL4 6>x rѣػS/klG=GOV)Sv2DIB2JH+j*9i'89Vu/9o[T:٪>Y獍z׷o\Q _oe.MI[;9Б5(H:Zs]N,{#cLw]:R^:+NTn4?W%6Z1z+(*yz:3ɶGZJJ1~OtwMd&Ms e=QuE,a+`&4d+Je}QtN7^v'}ԏ̝YԖ #:AnzMblK$tJ.%Yf6KrEf1Zj̑ ;!f_v18jV V{`I =LRiHdȣSd4mM̀B.F·$a0\ihbuP)ʆ/|jmah8Xă`&e\PN)Q !h­C6XHJ9< TkP "8J))lg|`DTQQͣH ͝rX;P7|q=e(WrŤD(iY(`x0$z00L"j@>0jT3U9B"z Brz簋9)%}4{x_܂uy4&2]EpC+U?* /^8U;KVj.S\z76hJ$(#!b]E%!+lF !e}]!d}e?}MT%At `.b"׎z!HE e 77'u Z;:[޿oV{Vcvuo_.<"TD+*")}ђhF(qV8̕TupE^2I6^;P[÷f* ASq>ޏY[2ĮZIpsowŧwfsy5|n*!4rYBU&&\Y3c=;zwM肛(u׬=qR0dP["CpC@"X7I yUH=<k$6bLB!N'F(ɂ.FKw2,N;O'uOeE`Ǚ P7G6Ǐ1_Qi9syҝJFkD;f0# q1r/z0vIBb cxRE#`Eu(TMiu)c$pqƂ89cP&Κ)XJxccZ 'M߸xRD2\2.LJ}, )nE#&|GBmn7koH5:z 9a~D| 0˵1$[yTzU F:a7꓎Qic>AжjtFP,z98u8a`QX6*-둎-"} A'ۼ< ҁ3\j&`Ed=<[ ёL2Z(ZO%f`'KVHh(8=S.iAQF xĿDPTA7V3Qsp,uB&Qt&2z)MW*Ȑ觲(Q; w9OR&5=ʁck ҳd#/]4(K,C]H<`Q 41V8]`IQ<pUA =Q+ kA++& 'cm ;I uށ̐{c7Z:3K*VZKgn {kK;[Kk9gk9OnV6Y9ܟ򡭘U3`UŌpWt$+ ^ @sͼTCi ͋5!NJW ,s !8 -4A)MD@ќ$F2cpb IJ6 HJ c 15a%bZ>5=vXc~vd?:n]|_ۡ {/vMp^)Ru^h/ 9bH*P@7dn-V_j8Wӳ wwd^`}HHF .$Ks7jIW)y-hΝ1rcwYg}rD_@8@v56.ܝt9;&X V7.;BIg?>]??]>Lsp7v=oCYAu V?KqCy:rbW%mc\4a~'TZ ~/.~պ9#}XJIMR<*Ww`DD$AaO/?zӰZ~;YjUU,/K5+EǧkA9k~~P^FŘ]Nf_t[εE.XE*R%KoQief^GSLmM|hH/D!mu>˝ zێWYk:Z <~덱y;^-Qrt>Obm "0)Ya4a5jМ_N5l}Wz??SqwrBZepKjk-U\z~AZuH}h|_0b1M.N\ek1;F3%BSG7o]+rEG߅'눳׵;vf*r}ϴͲ5 vmc:Y5$7-;Z^LOv^EDpK䬘oGyXO oA(TAnKF[Y!eʄeN3 El@w#^r7*eʷ,V\lgL@vO#bFa1p=F]+KB!((7ӎD,J2&_1Ga[*p0IЪ)Bf+2}(491̟`^`^`)[;duf>m<ůu66UmQWZA@ pbކU4!JN7o l%ohΙD0%KT 949GӑzSSE>"aӉ?ܤ`]y!Z`*]_:$Q8O.FGv{#5B$MLiJ.I&&} It|gY1 `^0dQ oKhÌ$e@:ƫd rGY@"*(KH;6*h,~ W%]'W 旙~j'ϵsL/lxpQ୐i{A" \2[CV/VWa0=g<.$EFMH !1/$EJ2&9RxP#c)`rt0A -ǛzIACҔGn! 
(*| B$-sZ)<SJ^69w-ZtQq1&8=ݙ,/0Y/kf݌l.ĵW""3KS4bb8bՉ6xG?SP&H$xN|Dvu>Rd-yHAevFD㛏cX 8uFO8cx2Ku\,P.ɥcjZ.L?gw%0 .%51McM]BF鯣W|ho\9zoY~?Jh9\^K(4Z%\]yeb}IsғH&</3Or/`~}uIe-&7K1z~^jvb3Հwo`tŨpˈje_ 4-*\fcfJVJhЧJuw@ʏ" v>b~Ia}u?,u'QZe+7 uq'ea];Gh#QW{.Mcel;hwXžTw{GgOu!ةu'>_Gn-T'ַ;} Rtcm*em՜<Ft=bFSI$-!HIHƉd[uyu!n)6)/S6'<% ͿBi6Y-S8lᮗӅч<ҍ,&?ES ;,_p•}Ca!ěUtDn%!}z v4r{.Ї86.2W c6 0_ gs졺 w< * K vB @hn|Xx:((w)W.!t]NWU_ZÁj*J|(8o8=ԇ}yqGdwytw{5bH* tSi9n)KX-_%j#@?ùf]R{|Ĕ>g7(}]^RWV;#|;#FeYyzҎD?* Rjq3MVU望 h*VPY-E0'ejm6.$l>1gRA϶>7C]G)P|ZRtIhO[da'TC6AIߗ^xpfPݶ|2i4#"70I6UKo[OYW VAgtrP`?Cb꜇z~̤Љz{UkFG¹\VQ#Tt pc3ڌ%۵2X\a5?fӛiE̥4Ȃw舱SaN%{AVo9s+xM̥䃉Md&+d>#ue-}WWKauBSUfXg2Z<u2T +\/Xu KBR\U料̥ z W^6*wk7m oVq=K̳0F<,Vy^cwGs$b2Lu)"57Oyxk 1o>j7}UaO_\rҕ-)37ɖ i}f|۫C6]$v~o^I8]2B(pYʻ@ W@Ip+ ~3;4t~GJ~/~pO xx.+,XN T( dpƸ!ZɊ |!=QP< (=I<f683ÕJjhrt4:hH& 6X.ԦoK:͂,VHh048= 9\҂R6QpNDzZ!HPobXg!D)oKgb4ј|FX@:cg_BÑH410I*b|Hre{}k ҳd僰< 1:p b!އS1  &ιN4C O%<3(0ǩZO(0Xŝ& oAD^tZ:*1:*/lsPpK¹,dcnY8RHOc$5訐#4h瀻+s!X(CGB1%~|RS$u`1E͜ 3 `28,# ٣G,@ц9cҤ<' z(Z%*%Msg˼SN-@ L } Dy%,wVz=:;-ϕnwhyr3|=7?a2.x^;_Z[\Loy~WEfvgл-A>ۓ^ߔ4q5[YO(Cŵˆn<f!eDغn]Tw]>g$-3&|QdMʫƻu ZP^k}?dfzOfO'%}館?wo[.[s1EӶh͐w|Ig>WE~_C=7"\RKS̰|>53hGy/xP bxŐA.]@9 oJ&wE4( zEAY‚*E*(4$t\ŃYj֜INnc(w>?Db,9A]:3vv+~5~!/9Z?R"wGkU*LI9\(mZ [jˀtEw^ Y D& ܄. E5%I sċZbhit߽:b[b`ZZpA'mp"/g'YP]^0[M(.2vZw($6s%8Kf[T|av FrZsw7/ޮ|]u|ςEarSeߪ?1UʏFYYT.wzZݔ%;ҝgcg%JPEeAiЅE8[dKg9ť{+P;ϑ|TM<*y,/0|g{-t9(|Dj)o~&kha T#IU`~͡şPߖ,W08L?658z?f,\*ڲ3qX ;CvlMNOOnOUpEaZ ?CVJ7*I䎯F/Fuj#){+qf}tk^]NS:|0g]ٛ.O-6FP,Ob>>S?քY 35Fm/\4k L?y@^;Wb`^Vf5(Whϗkca7n'Qzpc}f^{DМ FikX+Dׁ ˙D/#8UZNeR6?ҁRG.lu_D<8/x:Zlv%]_v`4x_@Z"Ü˄1G[շ+)8:<6n@vP7@ :-@ڨ<Vzw-mH0&]6x8bO=>La/ӗݞreo?s:Nx{xvPʕGkAFGw5any &uӁX&%`0kN8S荑 E.BBQYVQrHڇEERWB5H2JKUh鯲 ]NǏ[ =O{[T!}#/ss 1|AMY|D+.*}{}C ?@Blj:ѮWEQ~I~Q~>;4ꤊx ɔjK6he6[hQ'ӇGVⓞZtfEwn{zHlSխS^|eoDMI_R֧QpQ T sUXd,#QQ>P4W<@k,2V8o*@&z^y|߫^:_^^my1֟5F!7mE]1Z,ݫenp}yB7ۙQX[u@sG)ZΛ dhBɄt7!LH7  „ DB8&UIbi,:++VըbQd$exҚ25 P_Y:$GZTs1C̑L5 JltYg`%/Ir$:?a*h |>C%](ca{[81^=^UR@ѡ>u mP;],.tCR*)gQyrji+1.YGDwg5%Z[qkD_vd='{zCe@'UVV3ao~8Jӈ<؟.ɷ}8DꈨZ[w/?Z4k!qTV3R(KqoG'GfEJi.$̬}GM H3Ȥ!-P0[~QopGĉˁ9*f_Ԝ}鸏sM59pnsk+O|>/WJNdڢӲP(V:e4u@cB6ŻRo/ĠYfa4o6yd/z$2pn[V=voNdߠTR 5슰쳭ZWN[J(`UN/%G:bHUEΞ`뚉ibGc2='gǸ){@G { _[PW.D0ytPjEav=x?0'iN^/ \}obfjo9V*ʏD05H €QB#$*(M!Ȅ%J_8HEʌJWQpy\Z*86Z**d#R"J$73qfOƅ͌C̅v£r3+-0(]~?^}J+woA˯̌MVNEBk+R ̔4sdT)!Rt Y Ҋi,d`7|l;|U xSS"Yw.ؾ\ )능OҒ\;Nil゠%ARgK$L19(iuq)%`%$.2wpQh#UZ\Cmȋ(E#/uITR=dV(VL6PJzeJkS@ $1J>`G^</짯1>(l%Oq|t7c.({wOww/nÙ&&Ys~9Y:~29L$4ݪP;@c!"Vff"0ZDЕqIJ;9]ت~p:-]ֻ>(Kb+;C?lS;+e39tbNtutN( J6CW )C79:4DW }3tpibt( 8k%c;]1\lfZ:]1J7s+-F ]-Vّΐ!b+[+Fk(h #]yoS }]1Z<]1ʍˑZ+;v|6Ҝ܋Jh@ziw~Eczu9-7}ZUlVI;&@߽̻=:mʵS(ig2UUTZ[.9  qzf]P;HwAGN嫢-)0? 
Po@Q-4a~{OoظKo@֟w/ (!(zJ4CW V;PꑮΒX y;tŀ4CW ׸VѺ+B 4DWnENWHWgHW(]1`㛡+"VءT:G2RY+h.41(7HWCWV 6CWiV;܈N|I'eK[ qd`l3o3J?9ҕWlK{W:d]q6v)NWRˡ+WB?}JS:qj?v`q~#]<^Fa 3T;,z`j3Tgd>Tn (MB֯4 AmQP[*6$o ьeh[T՝B B J=k֢kJ ]1ZmNWp3+Qm6CW:]1J5n#]vVvA렙FPzG:CBڹ + bft( 3;z*]`%}3tpn=I9ҕEKo fZl#Rq!UK':=82Z%QёʣޕG-d; WV%^ ])+J WckO{2'Zw7P:5s%veۑz6m]zcoț0FSgA.8L5X5lk#D7H;Tlڣ?}ս{pmE"3KdB)%Jde4]`%bўB( J]`b'>/-^]1J#]#]9)mpC E ]c@WFڌtutugct}l?S&4Kd)hU[um)\U}5)՛Fs|I_sZ6E+Y% jtX@/>/_N/SqZ~t"}-(!>E:7rNenqѵu@ Bz}WvZy\/ ;o`~Z_qb]_V nx5k'R=YfTgvH4.)/7] #!%[cih^=D<욎0'To~M_︗=~b*jX ;柫23q@O :ph~y qϑPO_S׾]AS?֏ZM/l?)gCqĚ5.}-;LѾ$Vz7|W-[1W.O5$E~ڋEo_ Dm/?Ⱥ[G QRvִY9KFn.EͽzdJ.c4BsVPی3.R͚aM\O0w:[;c1H׃Ё5 }P snN-$K ۻlPEV1LEK)je"%цsPU,QfɖV1Ium0v4GYm)Yzl"!ri j+0.v@I()wFJXH-tϭG.} B8 9\ 1ZhTUiԒϯ=72Z#)y|ԯ63dڦbҥ週{І>,l2 ١L6b#V Ԏƪr61b5)xٻ0F>8=bdLBqxoD]F\RHxk!bTg1Q T,Jv>Bnw|BysUUuP ^TG)Y7̶6E5ɍK6J3 |+XÚnN';Yn)Ǟ\1cXo3&dT&΄|`5 j`%Jcuߺq)hK -F`vѺXm6j!G]C2j(UÆVX9[0f %xd͂U.JbP!k#Ҋ IewhBJvlt%aI jSNcG3PQCm>+h-aK>83 AL7\uơ/ ":±e6P6Zb!%l %4<2!^=VFW,*B[6g=akbTȀkt T{zèl#ZsA)% U#] 2U+wIQ22)ِM(pm IiI +QQY[4 U TW7"z,8(&`.mO J+Yv|эP!ՠw^KCp2Pf6[o]1K@ JAU.1a R t,,x/&S1RPkN u(R!K@T`>CA.[*MC{Gw 7XѦ")E2GQFhȒ ,Ey@?PiW ()4X|A)AYr"1EUDI)bug>= *+= BQtʚ%fTYk|BHܷ2O!#I KHz{thoX;f1*Tei5!pBK.}G|`Ⅹ}5˛^V [ŪΎ#D m3bka%a!ƻAy@r *}r@6ޕ&*fFVw 2rihyG a hhQh {B{[ɐRd5B5 Cޑ0\ >;Z,fva6f4YdћAig%з^TfMj:z7,J4𐗨=a]˩4TsCz R DŽ^|;[Q0xãluLdov ==#g_М~{odLQO oO 0X3yu#6?_Ivm.AN6ׇk>;v~s577/w/o~wЛ?3N6v/O޼&VODcOOڝ]r6'N/no7alww)Ƹn?#<E8'F* Y+r1> K9n\hKHVvN ~'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qx@y2`ܠWD21b Z@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $NuQʯ D'zZhxN R'Q:d'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qx@`\z@L @ tN6'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qGC6O|ԔG?^>Po`ܵvyu~Sw0,^q)b7.e0b\:/ޡYZYϒ:]1J|"Ԋ=;_ZѺ+F~tRVDWWDWWF]1Z{t() ]!]%M++>@i5tpmX ]1Ze$ ]zZ)ˋգ;qprzZ^2â+BWOdSiwzn~7}~s$t^#PWWs86ٞ#Q?R5}wqZ/A]߽;K3O_<4=^v8]'|۾>d#_ &;v^*}{/?ӏLb~׿J{~~xέY~TSEWz1xt&ed:3\z3NjCj֖rSmL|ށahbo r'Ξ)bц;C"\Mmt:qk"4OVX1~E{yp[ee[y{֠"`mjjqF̡Nj IQX ]1\BW6ҡLQ VSZ]'b/<ǡ=|[(#tut ?}tVjjbjOQ1ҕ7Y"bnk+A*"bi=W^j5{W֤C+FIwutfejj20ptNi#DF"ٸJ5Hk7 NWI BW29"o}`/z\t8Bi<Փ>+~=7&KU̦ Ѩo_=g`ɾ9WF|+v#_˘{{by xkq?pDU]mo#G+>z18@;d}&XXcȒ#8cZl%YnٚFD]bwOl%J`%EUkIU>JTr'y VEEF}ժ Q/(SΕ3+!WQ =Xt)S2%p?LIVSU Մ'V&?vó,Oo~La4Sې`#~$mJףfET̹IbտGoZz{\߿򟟦G?uğyHeK^6 JW~iẼua59?;S?kc|ӤAX.E-R9וCu̴ иKCD@4: \pܓ!EsRĶ̉&z0ɇ̬=S䢬 )+BACʟINf e&;–bULcg|Է^Ǵw9n&K2]bEg4 #CyGjX(2*?x3?GO^NΆ7?9 ^7tA&Wcڸw\ڰWS-F?j= !۷**&BU&sBie j+KF:98AyTQ8ZCE팝݊J讚N!a8yKWw/ڵؤ4%Ͳzr&\c8.? v+1o|l!rzw5AIGlu'׾JRq&ho&).eR1DB[ƤV g35/,fq.er z0lŖҜXL *R]#cgFlΰ38 mXh{,<*8*6.fK4Wi8Nvv7Ѭ/T0MnGY_0b$`Bh!eOp傊\2jdmvT b-9I0&yѨrH>Ls> QyѺ3vv#vӚy*ݙtڮcv=j6ACʤ2H&NJ] {U5 dl=LxP DKXE#PhK(B:HN5)agکt:LXT38}Lj{Dqċq1F{Gh2gˤW*HIfx>rKΝ"wH7.G sbr,gB )4hb$OZ(+rKDLj;ijbIɁyY=.n[=<r Bl"($[!A%'Dpҿd%03'+8< ln6z ,>_]4hȁX\AD^KŎ'H/Z-$^4Ip41i]]g&^I0[ AlI%"q<ޭ, %{,"t1_ꠗ}~gDJJ*!_9Bl|-y>?_i@Ȅɚ;n`4't<J =,!sO.͸Z˝t21VVdQ*V*DtDr+ZUJҫteskD$Bjw≐! 
%BHthBF4,YP!Ed"fxP8"W;q"tjY %LPRɔ˒2^*a2`E 'Ld /R;;cgDiX]M'N)8d=^}}k=~n ;nwX,㺟o*eu{E_0LPbHI0qZ_a}y]}=?]~/l#N-K3)dLFƣg)AS.^y;QB/1iC^kW 1A mypL7mE 3>դ 儖*)exSyH* 9E흛87{>MxޘX q5p>FAZk{22qGi-sP9r'`jyP¥Tزa}?]ZhڔGѤip{ƕ vu˻k{g-bmgq<nm0"+Xr[ }Z7#۞[y]%oѴrUJֲZhZO7z#KˑǏ;sFL=RQs be 2}ʱ:w.Xb QJg0&;:%r<& Cd1)g9Xs"JE#B߹_D$N5b?6.?l?%HUWP Տnؠc.6^{htet4-Ztv_ áCzQԵg7 \dGR ^]hξtʺ賅\ KM{J!8FaDv63% YQ =?9 uzX2IHo?_y:KzqTlU&gqL/q:;]M4=e;fݒb "{G׽[mՌ:e~ή 9sx&^4gnzH^C^C۽7 ~܉ijVf/ֿP}YZ~5m8ے|6nAlHzy(I'f/mFV53'WWu*d5\n#$݊PcBPIJ\B[w%Es3҃ʋ3-7c5y lT^2u)uLeEyƿ8Mav_?, xU/s[eL;!e u٬O_q8ο=Bfj[T`Ȅ2JKFThRVgA+j_Pwg u?,vzRtyU7@?wP#“ǪSxxO+ߨRkmҏ3aZ~gvZ 8J]R&@0*,C{1+ʼn# ;byB=dG=~ګ2U"a"u2u@ O:Vٮ4B,Ku u!r!rrɃNp~bs })v:q tZ\W`TqwS1=&ʹڠVKgm @6 (4FO &{4V:vB:6XU7Fl_U[A}8޵q$e_vU`1.,!Z;9~3|Ierf8_OwUMuR!qN~U:rzJE|b\O6XRL3O{r{V "7rnȴW+ >\@EET>$RG9r } x56smtP* (J\24hD굊z>{MJPV: N]VG!{ߜ6pMN'JCA뼬t;:\,!KϬs%D`<imk X2Gʇɲ2aEB B1 M+#NВ$èR([#\ hͦ; I0jno螉na34ygܞs))vV}Nu>;<#8)X፬ּE>:fkMAwh+-A1e^=V'zo G "" OVN  Hp*)c2$ψ?1LQ[/:e%sLofhl҃ DPug `#_'TE1i0O'꟟7)t4D(M _/)/rRMG3s5~`f)/q[Ns3oGìv(^xA& GdM"&=2`3Ϛu|9:i5WSх_4u~qr6 oNOVZxڇ<,_ ᱹnݢ]'4EYsƫP?~2K^vq|4fG0^s}3MZ=T~|]nEUBګܻnE Gcxj| ?-ѝ[j½R2+;3+.LVjZj2c24Xi  =atIlڥJ՝YE,lf^H+_ٙb[wvaj ީn3"u1q!-U)ڜ$"&%0SFraxmr ep[3Iù#ho /Z1ZeOiԮq921QwMol-uu^k\ib ~+ƹ8z!6!D}r%Sʙ$ ?~t{ȼ/b\-~ࢳVl"DoJXbj0Aဢ~iP,b߼X5)<,hF{Šf&٘(9d}p6|QlCQڋh!Ԃ&>]4^LHεR˱ۜw2Vu$H3 aEvZ0B*GX"QL%LiJ/ 4Ln'&ʸ+Wz6 ΨDh"<>',_=~?ZM7+6βѻ浸y[2eGʟ_qxsӛO}_:}NON}};7o'YPxiG9Cky"1Tv0jk/''bjuw|ǧ ]ДR,3=̯oϦ_Zvc]m?JsʼriZۓ6b8rr?NYL%;7Od~m~qU34[3p:+s]knPޢ;ɞ_u>Ÿq7 -zͷxV C獊вOٕE{]Bu5mKR{ʼ1f8_5XQMoTV 9 ,Ciʚ }g)bȆqٺݟdܙ_ϯ{jGxGp{j_7Gbsopԙ+⸖9?i'QQ[gH$N'Mgco7=ةhs֩`TRE}=Hz" (ךY ^mv]mxX; Ip{w4(H\kJ=.hCL$]T) Ge0(fQ,ݼJ$^t\p.jI]KARIR~#g8a5|?F)т:~XuMI`riH1s!y.DEo;)sB(i1*e3zM>*1`W ~,KN [ mMx[j:0!:묭jTu g62Wa0!wdBꞛw& {aBZ$!yCGUbSs<0굎<~]dh$)0k oﶀ3/x|PlJS)U5y~u fw7 tlA_^H!pQ+1@WCtLF/gxO0aln360N e9ؐjFyOjaJ Y ]AnGO%@=-vRZmbsJg 'dW=5`]@^55b" GB8 6tqV(]EBb-•O#yh[σ=N73+ȮDŽRj=r փj_SJP+]6M={^㫋Ɛv!{'ݫmmꝫ(ךYI'9brJݤ>l>×%rk^Ck** %ǜ3+~jk)u'0YDp )E"#Zu%2 |iOq(5hCWWb ®SnOWR)2g)2`#+ˈ,2ZNNWHW %EAt%-G]!\Y]eRj+`?i,t (2\UF+U*TJr?]!`$b*Åb|W}RJq/2`UN\1ss&NWR%ѕơjʡ+H1k':( [Ł`( Xb* WW%]#Ij=ji5փh-z(Er,t]mWzk5F, mE+}KJ)}*Ei&w`Gȕ >hyrk\x+~+ٹ?q0\Ծ6i}7+orOͲ?];t2~R˂7ͳԢ EDI=>|xwRƸֽJQGIQ N.Ni,:;EVZnfO";. 
hRc/@.fr6:ANd"px<+1Lh%(ڮYJRDr`2\)nȕFvRJيS目_,2O1Y]׮@ ϴv:y ػu.+Nb\AȕrWO#WMvw%ƚSdO$WvvvfRZJk+\#a7r 8#2\> hH/L+sWq\M2nm{h˸׸ŢhVcТ$n1fLJ5^{f #ޭ%<ュ`I%$}*n0[v{7{{Y&N$W LfRf+e^\)e\\y}ɕ'FKvwnJ)lrurE twRZGk+۹S`;\)prp+vRʵ'ճ;C<\80\)wA&+G+ťi*z=2Jv\\i 4r~w!]&W'(WDLH8iJq9"WJ{-L U 2T\7RZ.WJͅ7zb qu.])nY iW(%MNP"I?\Xhwf+_RJ)ʕK&+\뫸4M1l.W;m*d&+7\)OȕY\)e4\f*=r+ {j.hÑ֮Q@&WOƒWZ}GAkL24kpنsI-QoލTXn.1|g+z8jŶF:vCI06tS\ Gz`#cwݷ˝оyy{;.o./.vorx =3׈],}}o VGrǟ} S8c1=3J&ӗڃI.?yYm~#?c>gMp+/Ye/yM5WcDyoeAx ATV,R\{e!n VV'ԅJO#W4\)m\\)lrur{'+[+.YJi]Rd+ ^&+ȕh]@*pL\)[}E>mrur䮼DcyR\g+=Vf )nrurE'+c MS *-˕R]\T=\p+=@ )=mru rHVfA餟>{^B[ %3>o^7oB{sB~ۃcwFGV> ~o/..kjD cwhU8B'f w] 6z~!tO^n,>Kٔ/M|z?uw໨}3^_ܶEqv//dWFru|}9C30PWg[G\F՘dD]F()ZTMdCrl IϗVbr%ҽiD.zor3m.s)8Y4cge_g[}>.25_짅4<(a$K6, [E3C^ZJbCsP'F{(*#YnR֏f(9BXE׊O@j_,Bmy#kMF@8#-nH sGҷLf p-s }KucGG*U%Zr6[dw |#-~{aRo{l[D@T14vl}ԫGhr a\v4EJh̪sC=";#}hxbbD!8~7z@FB2(՟͇\/l$JQFu*L%R(U_8Ibꬪ6^k>bVH%5ۆ6'1"7-[%@VcgRkaws➬Ed,=%c##;$v3g Ȩ&M(-)pP5H3S$DHT,йuRK0-F{a1,[мX}^ȴV!93 wsr^؎<|lZbX"# ڳBAnQ:K+v o'(L'Lȗ*}HO;HX`1أ9l^ T9<j۱Gx08QTz pyv%>L 2hGo ǖژ[ĵ$e=BfCmnhJܡ˾Kp' {aMHJYF6lmbrw̡*"J@n78(ʱ7g@Pպ QL ;O-J#4o|G:D%(V݇+%!LHpm>2 K:$, z$bQAQiTߍv]-pl75VP2^XRa v%,V uZG q3vA6$=$p,%D&\t(BY4(Tg-"ueՇ%N0t掑aEͧ"JbI*)Y 1JA4aފ 2O2 pe-Vz!訽IE,8J892r^XT5A[?hϒ(Ez@?PYu┊n|UW\c4Wj B1YJc.%:BP-R{-b@ࠄY;N(u{4H.91"sEՊȐޣ0ModPg ǐJ/wtwX+fӊ1C*MUkd- !hBK!C}#vvߙY_o:ws1Qwu`D6CT 3 f3._LБMW%D%mHڴLUcX}LC#d q?l(蠰( ₙPV24ITiy],C=Iq;F3]KctXE2}eXV{U;dq,xP' ;ϫwtr:&[WBNCu [Nmgw}6yvCR\uW0t6FCˈ|wic.)ouԆHT&ĨPK@b*20n?@]Ki(vp4Ks[S-h_1y@P1-Lo@7X!Vv6I᠞1d$+Ak(P0@EDe#[a5v"8AEfe](-\Segc{PZ jw787gAmHd lBfcrXFA2ݿku=i; i@eVo;JH)F=L*"2AK،O =/i^,A `FO [萹쇍{n*>_N^_Z䠛6Q]<`gP3=EMh(k˿ Qf1hu\k͒58)kn̘ @y}7G#$ȷ=H#Ԭj7LJȀ=T9 _Qn$kqPDGI,Ws"S@)aTDAO8S aP6i1TwmY;z?] lA=-)R!)}ouђH=Ŷ8dwT׭SVs+ f?VWL"6f* NN \rƷ0wkt+ЭapP0V¢1\$$Q%Ad``Vuv ^.#X\r,qЏ$=sr-ߘkyH 1Vx6QO>R3I:!A‰`!peR?]s4f0werXV@ueMF) D)G = ׁYK oKж#Q@8 ,xBg";++B?Sqtd0K;=5!8 "qp,K ^q 薁3߼x …H`4FNmEY˥0MfGO4,o pD$8!zR`yJt(+,;* 0pazUߌd'y XY<B|x>_:<|]|d6'g#!p!nx>s7@{\7Ե!bnj #TB1+%GJ QzO]$%T @/Q `¦B%*P J T@B%*P J T@B%*P J T@B%*P J T@B%*P J T@B%*P J T\%2}R`G haVP2J20T@B%*P J T@B%*P J T@B%*P J T@B%*P J T@B%*P J T@B%*^hUϬy%7J { zJ k-3B%*P J T@B%*P J T@B%*P J T@B%*P J T@B%*P J T@B%*P J TX%0dGJX( \*hޟ (3^H1zT@B%*P J T@B%*P J T@B%*P J T@B%*P J T@B%*P J T@B%*y%z-읩>8x?.uz}j'/o~)Yw?"9a}.`a#\*M7%@p.ҵL -zDWX2*p ]ruBK+ =+Hd.}V}RHWkyrJxWȾUAԡ骠ꛡ+zϮs.?|`Wg^zZfR+HW;wegpT37~Iٞ!!f{1p`2CK8X8i>YP׸R=x\ ` =݇d6愓>Vi^}4pvv7rBWŏfP~08<;Zl,-eowr7?p`$u1rQiT%RUA[.g*aw>;8V=[ͤWYUYsUjQB:vTc]T֯ ,\+Z;q6"%?'wL'=7CX#+J+T}VZ߽{֭),`4kK5`M:{9dz+iZvwMGO3Tۜv0?wI9K>ස(7?߽O>FOZN[y5x&8F `5&)7rptᔔ4M1Moy@xeN5urmiQmEj쨶LthZ_#`Lꔈagp^Rû~Y*Cup)a8-.(4<\\p6pH;%ߟ-sYZh6غkvpd ^g?.= `ӟN{p_P* 7Uxoռ/tUt()HW/ ҰV \猪}mUACz9t%oo?]?`+L_誠Ub骠ҕ#*UzCW`NWĵHWJham KzCWM0XJtUPR+ +UUoJ_D_誠}HtUPw" ]> `Eeoe/tU>/x*(%Gztee)c#*Mf * JfbzvHJ]=eϝw0=/]= x`a( Cڹ-5eϥABd9K*%lծ>MFi&ˏ*#9i0k VVW C**I 0'-g?Uɝts2'QNKoKMiNfw*ubZH!3~(զln>٬-{UyQrxQmxA[J裍VӿA|d3%OuQa\UNY Qs*E d4%'ohSlhE-jWwjo'o&ʳWnf t_)n~/׿pe`x>ZNS|cn:u\sȱ*?EC~* NZ鍯f/5~ZtXMD&J.qy#AFF ,3cXύ1QpOa#ZdBBL"E/ &j͙Brd9x="Pή55ZpJߗ~iϿ_XNI+M[,x&h֘37diezlk1L7#66Z?6[7?Y=(/VF*mնg\`>\VxJ#]6 ao4 '+}xtt6H?.8 \m𚔜6%CuQ&QRQ xTT̹J3t]'-]uG;}C ɠ#2}unJS.c'=q- 8o-G7)c5w|1kgbup~28ѐVwfd. 
Pj=h;j@ 7-Q7@u4s`୍cXN9oV7mT8S>@cs庾7ʹF;IjURԪxIIp]0?˺X4z7ޟ׾֭]Ӹ-Ǭ`[68 f/"_s#?9Om7r[^+@vE?Gig^T}9ěWh;D{պ9٢LW=e_,FU{*S.,!TיUʂo% .WA@qQ4gkuz;rVAa+l|+DeԹrR[pW2J4,rrdzM"MΕg \NɥdfoM)zAyoLbL+x38w/>ŭGkwλuN+Y^jaV2uH꧚D>gܳx6W;w)[fNkZ$+E3LH\+7RgtNjD[FT6$ 3Y 4$ah%s*/넎jgpCi!de޳GϭQѦɕl}x7^MۨP۾^;ꁻ>rO*ҴlQ}OY=Fh]ӌeϤ&KH*˜MA3`099~{R211sʀzR0HsҜ)*T f͌*qag+ꎹP#>)ϋgepzcg0ͶʍFOl36DM<1ɘXǨ0^ʉ553ތ(V!bnf5`EFB%Dfc36 rHm:f~2s_X3ؕMǬmу⃗.Sb6.s,섵$?-!ҺTo$iO&&K(cvc> |X˔BˆǮh;fDx3o2.*`HyΚp+DFR.DeJFj)#ZC!b$MS5ēg|'DLh4:f#o$AW{Y\ٙȋvˋ /"/޺dz 0"i#@>2jT3U9B iŧŽ:n|so~r6Oyj`/Nl\:]'޲GXymJޚ٤QIHg~*7v8W* KUad]ƩFY$B#CJAJpxU. YigV3g0i!DHwf!^$B8ˌM5 $j'| 6E$0뵣^孧)Qb zL0#07A0-$rScr\ZN2ehE  @sIг =!ޅl:|OYfwGƷMVKxNN *39)D&ђh3DQp +,e:)7Fj}oGN =O[ԷWc:p_܂3ŮPF{T%7U \՛*V^%l*/J2U(d%]hdztJT= 湋.$D3*yOU^ )LSlXykV*5Q%yҌ:RxRJ\dڪI;FL!kKQKGVTTquBFߵb\ **Ѣ-+\ k4zw+k_:9b0L֔?{Oƭ_fl܇\yNRIm^6e'T@I8[ۘ!)j(l`ݍ>XQ ={%_?J:q-mLJVS擆-r㾜 =^QDX.vĥ[kn{C`-3 #NvZ2R)@MȢNCR3mт%Я'Q4؅.Fg$pqƂ8E (i))X - <֋,c2K>E>Tc^6wqiVjJ|4h5=PFm W9 <G!m7"" tL s_VUwb9&b Q"u8(&6j$Q Y@ wW ,s""^KDD)InIɘ ܺH;UI$%Fzj y/ӈO}fI6:e;vxqk.?8 L@דA9È V4DbYbR P+m9(\V!q_5 q߮ ql %@ hÜ1diRx5zXNŨRZ;c2C k@|EItLBף%9_B%kQOML˂z3h_HYѱuez=x|Exf[زM~=u{fMx\fx=1oʚ}<N'.G55[mZGthl{} gTinn~ ݜq!c%.ֈ}OyP`U)B),a>4z'Mָ~߻te?`wSpbv IL&A3(4F 3)Eg`NKrHʴLաtL&"$.Tn~TOQgx2=KjzZq\FjfH8[ibD'r000AK(E2 s eΤ,a oVs|Y)//9Jp%xzVlٞލQ6W6|l0!V[cp[!.$`^E8 AxdTZ+O?g:<d=8O ') 6h*PAJuy,$8\]Fms"8O_nq?~4Uف9GO)B$*"Q:$QIrRy,2,9, mB^BQߚoFuɞ>x&];8m&RH\>RƌW(2LdDGO<.qܽӍ8ne7=kncYm=/qaYggIYrp9}Ⱦ)T|_>-T>%f-+$kgjUT9}ڦGGbݴ/D2oܸ+iu~ nч%A}mC7j϶خVثU16vlB۳(o5۠/vw-p\wY|}n @ v5[N̊m]A -j0[!]` kIΰrC}yoiJ#)Otڞ"]@tlԂrMHa,7Zn>s1dzMiR.:hW(OP͏fgՎӋ!}~dCڬπwRP%p JHliXqP4W’TVZ:’d9_?W/_H Vg/rou`U(篋\$|3։TqÓPUqgEb($}=e+X`5_!;G~gK h,G6/W-OMUI 7* e#Oe5-lmp}RˎmU2ɳ˷ϮUrEPaZOaVo ;V% 7jJ/G/MWE:EcR]y277lzy]_]?}7=x*ί6M8a OZwb)4T_ynK"Z/&h'Ge/YloCY Z߽Xn%rI"3'mdn3Gg)8A4 чF}Z)$PŸ*,gpʤlA?{lCGyLƎ6@ V3 z+ шZe$0u ~e4ٗ6-ɅC$}R/IG۞1ޑ~w}ȭҧDwX5 e:[2d:͓yZ{&dX3x2tVe諳56j OϟA'9gn*Tdtakw~39gCaZ#?| lMrض/emɩ%EzzK# W:o{͓W;~Q/}D.裫8!>[EMӛ.@mW=ɋiZX-uR4DkMQ?isFvڧE߹M5=?X#"[ l{[ZZn,>6@b6}|yV4^HQmP|l{..J-*E 2zHi%WŹѽeդT*_QlO0ND$"ZȱIiٶB߿ݙ7t8X},:Eb˔!YH.j^p< ނ3}|w/’vV-eV sSܛUt[]ʫ*kyi"< oY0SF:ZF*R)Ko$x{͠|f΍{ DZ)CTAgi"tJT ̍*ɭ50h*qѸ1rQ7_% Gi,@3?/o>iL }qjó#L,wuH,,2`;٧ ~v[q=~j%7vk9s BqY#mG<:v:"VMIJ,{:PIQjS@(^F^+F%֡zQDP7I3\AF(EZ 4UZ4 gWaB3lpSЃߍ;w:+|lWyZϐCJy|ۆ/3w}vzan?Agp}:S+;u\5Uˢ=Oؾ> s*d@4$f>8QK,N$"s2>4{k6xpZ8";fvկB˭1MeQfZù`K ".;)秖Ah=f(`7CPu pyYQb Bz0FJZyNo*g<Fe.{'_=nKY)r%"Q{i)fcs-PHM*bh>oN&u ' iB꣣(= QQ%M($!Z 0k_!K&IQeC|Sꊗ.8`tZ)jڣ׎I6 gY]>&i9 c^7YL5ab~#wwBP[o|Gw 'QPw$ | ̓ TR"tUIbZXL':+i DD'!V$+g2(kJ*"S>*sL8Wt)J!"C0#6BxUY8W%BKx4EɽAKxiiq|tBS=46=`~j"PtVv`]!T??ub(gΩXhr65 D/P8s9~9iђ{7˻yTV]Vߘ2~c?0/~m۱Q\:GcNJe1A\ lHF+1C)6耪Jg$%C;gfGs%dN))j9Jq7 g{Y ǣmYWgWs)+:|!f$,GwÂx4#碰͕i`# C%Sh-@9R4jSf`[TY^#jhZHT2C"t0D0'Ps-I_.\kfoNeCwfB9dfJ2`NY6уLq.I 1%4I 2ea!p")KΑ&qE\|]n|Y8ۓu͕$s͎Ld|m/en Pd.&'_5s%.s)aݍtBU mqo{,>~a .Doo[HD*)#ykDAC :ZF9{+Z@ޯO֞i P(N" 2_0tK6^WuN(<Ķa.bzD vo>]lѫid'18.;:z!td#{Bٯr'#r"_up%vRzG{WB. 
E^OY4_ xmWutu;c.Ix{֥9u58UwpyB۟.2|S75|S7Aj$2*Wf jE䁝*IFt*x3:s$2A"2#HbRk}Ґ.P 7L눢#JroڜQ5sFbVŀajEK ،bN)ڠP B/"AV\85[+x ev0Cvkr/fON/;8f֫0f2.aWEH³iQ8Y Fj-)X?HG-B.&]pYQ.pO9yn8 (up(8.zT=G8ZČd6*$dAVƲmXW҈$|0SU ޕ8U6hW8u=2WO]R Jll̩1_G I9Ι9/Ƙ Zh~eߏBp&-gl ^ Ҋͻs^} qEFΆ}|Ԋ 0o x6Hxؤ"ΞHI V=@Db#zelLQ%WҠq3Gb:sh _ Q>i"VZq`f_@% uxfȁ*۵7r1[h/=jfS\ Gʎ8:Qi486E!ScumRq=HւJ[1X-Ze[M9P4N SI&hRH*UDϕI8vĉVT͇|Qf6m858ʾ4Q!ZQ:55qt]0qQÀ !J '=AC de+jT^I|#G8uOK/ڮ7Net!&TTfb:@$X) g 7YL7,垈Yѷpщz.A[[8^;}{韝9J{v?ˤpPꊺ]ʹ#G%]km]+@Ɠe*nG*^{ش=nӵ|ӖxP+5ܵf`ߡaZjX{umk[[ϸtU0s4QZuH!/ySBH2@3tu%]B垜@wW0Czd4o42T]_V}ra#Aw}Cw7'BqpnsBuV8;K``]fKpxr w{b{&3$%ޗܸaUDy‘jr'Jgm:QTNTY{M)'KAaց mzk%ipqVR㶧;[5M0ۍm hø;D sN8z7,{* yB>f⸊-j8${+lݓ\5TA­wϵS{$ h?cmYfYٿSJKٖ#b &o!Eh%Q!fSIf흐J%yY 2@B`*deB2`EVqeSO^5RTj-rb((`DYݍ: +ɮP΂^]G{XGt15#X*đV~۲g}m<0ϰ@89bp}퐉UOk}nVFzێ)Ќ?$nOqI{x-6z8wAR)k}H,RbXcNop+Pƃ^c^l63ۆ:0~ u+"0׎*2يLLedNXN`cz`/jڹx W )Tu1ƺ-.Bv4 cO/sK)z#N^d[0smU %Oe]{F孊RiP!w33R1 PtAWs*lNv1xndӕ?绌ool/l$}JN+j?.H,EmQcUWY+k@VRZS3[Tm樆WB$gD{=w|{MYYS̕ < mF [$ek7(^?e-HV$c> c3tm ˢ)Z(PB 0W&ͺ=(Hz:<`apa@@3)'ҫ)Aq1yܼpJnM+3 k{3m5 Mqd&E#+6OXڦh +ƂŃtiIL?NU~K%?Qn,/f%rw,HaE^G.9R ubVj^&_3ӣ j*"+g&S%$95VLaVi H:sn7CobCb' +)߂p-U9 _oHgv0[)K*b)q4%[1 %"W@߾ O&N'LU?2'S4S޻wSwp˓z#$n%/jQ˰v I#I# 8InZ5k^K-ξޚ#.|5&au!W070 K /)NzӒ];JUf233V3NhtY3h(ob̚6I\X.Ş=`;3OeZ6iݛW&*bxdUUMP wN`kIe8Aj3Xk# TdzD~5@ƣِ'Vh+^XJZ5cg#[Ga<Jp"bo9ľ˓ygշ5XO-l2sY\K' g%P|ڳrOA~n6= %pF-=k|Uų1t Q_}>4Hu-{RnN Ccؐ1]bяY 7W}1:[QI~pRPpH{ ҆^#;kY[|wXk{ 5Qw,>OHKY0xU>=̲}D \@xRrI я9[|v*YσnYN'Rj5~[5j펯~vM_'Fnw %BL[.s-$\K5'5CkfW|{̶VeݭOKO|Z e؊; vBګ6eBnM3dvkl4@@?xRٷo@qA1!' .0&1BSR.aػ\h&Y:'$i) |h8Av#ElmN9MUFա t8J)Ys Ef?c3?,$SNwvU ]' \0=GH=fa͓~9]f7&~RhrgO5.B]:0xSCPb_s%x%- ᥻5+SQ]G<줟V 6lzCQϚ7n|ڍӼS;JħyGwH%>ab ?RQ33q9JMEW@0eG r`)jV_4Sܰ8k3 Fh5+< zsU:W{5Q y%&ɧtc"XBP05d޽TclRh5 ÿ}S ϚKcو(ގ]EϏwN[Li0g~Qҹ:F&)^N+Rc&p߇lVTgygmV%]F8GuЮRH&8UrQTIaS-Ɛݠڹjb7k׋yt8VYV)9J'^rŗ27>t&S9E] 5m3uma%7~ł W7ՆݗBquG!Ss F~<]1q|֌`LҮ2R`ᛑ:^VZU!u/~kIo~׫x~;23sAG(#x~;CѼåt<~! >JcQ.4^ DK&ɧKyŦI^Gh4uXLm,ue}ζ)4k 3I)Į^&$-fuIt{C> ԪI;{ur YD7fZcRg \3 hg?OSt EOӢ6єJ(> 8qGH's;G};lw+$tѸRL %OMNCyBh6fOoW~7{zoWiKC1>2ss:}.vR;FmH5TEE?,{]/FŧA`gW/^}y rµ 2_G׋aTm8 n#7`0jd}]MF&Ӷ/iSv34[Zۯ`bD&5tIfy#y@;_J+}=]M'/= lu]G #׸HCMݟJ[Qy fʌ-L,'a&i%˝5K9%t"Ho_xdj|v-T0#8@TR:ݥ}ۄ̿Գb tf~[W4K s%mHķl&dsh7VkNBJ_a)>kheGc``a B5yn2  W8 >6G 7ɜ6N(Mٜƶ Ұg݀>mzwj;[FiQM'(غˇO19ʞ|eA _A.G FʓF]cWc t}u(90`oJ 4B ?2 JǶ m%6h5–t-9ujW~^LxS]g'wwww#a~/e>)' 4a+bcv284БML4_#0vskY׸R=5n mӅܑni[Ѷy+`UnȚYhI/! B H:+GK:.7 2U aC,Rnv6P4XR4 fZ^T+}<!u`0!LQ^VrCGjmWò~~9@R60aػmh{/$׹yzL\`Ɩ]IN,(ɦ$J"ERb%ı)pž = C!dQ \܊qڪh᠚ 戓vye8zERYw`>c[2&pC wJfL Zq %IÚ $v^t% V0JjȞ! Kdq$GudOL(FAF--SB4=2~v ^KU (,ۨDN D 1S{81K8P 8HnE:sZUG 3MN)0P/f lKBi|Jbl8r%ZrM u:j.@JoK"+4_JrɤrUu%=!5ʘqKqaue!ρ_WƊ$FԶ* lH>yRbwYkE#ǶdL$3b8TQ.KΜmTe ]jL.K,mVTdԍᨻ,՜~DjUZr CQGy%s=>źZ0c򎓗gd"E:$NWRrx,"yhdYigĴȸDb3 cȬ[w܁2@f:$\J3MWIȝbl\iY*1ژ0Bi%Imɱ!BH~ )M.J!Yց8hVJyZ҈5º8L]P5cnXmɘEx13 *LlN!Px)?=.?ϔJՎTI;zfaP00E 1UIFl":Zh{Y: ͩ]됩 Ha_ҩ}[FBFI֢ʞhL0$gOGh}݆Ќw˽ NF}ؿ ]Od28 ^uVh 8mK1V6/Egz5[Qr#'nkBah4!bC3˾g>xA̛!=ܿ/{ $PN+5P+d5uydV7~CǢ#2ݙJ(wۛdֻXZُY4dFߦniuZڱ^G"**DI%iɶUTyUi5H2Ǩ]*f2ᘱlZ'XSd%6T?QV-AupDUqYo⧺4>\7JJ_=|ym6 {pj&ӻq!}ye#iYp:cY*8[6cmEQ^UʨnrQR[3ktYf]w~2&j(|, :ȓpQbͬpe>g>Ւ|EKFPm)SE#o-+r圕ߎ| ]*o/isTo[ny6쾪=+: X"I(E6ai{`Z3 3=|NbVp,Yד KY뜞ҝ9YÒ9˱#Y&lxJWkeNY:0ǿ#6kF_ky,7uJSge6ֳo7Pm-0vZC9ʮpfaKENog#]dmD;: Τf-X(L?01׭A=Fd}t$MWq%dbzM2ƴts%lp=LD`ujG 9ܤ|2cuqx;VBlqB 4稈RQ/MܛmY֟F=zVD',p_w募2j}ǀώ33W\5;3_ hcfE 8ӌ_s'4|>zol+\8_:u[3%Z?tNQ-)aLE7ͯ w`2#)dLseE^*pcX) &9'Ig2dW PX*eXwB~ΘWGGn ׏矸OoՀOOYvH`. 
6̈ѩ&2cHR.1rqJPpQlx aLPVdF=2d\Y蝁$Ƙg;ϥpqq`S}Q%&dH]Loo&ɩ.i `#ɱFwc{BݷGD:T$ -Իue9!a(r=_lv tИk̟QfM;7.c޸oE-Ї0҇_t%yVd}z{x/T>i4$?qx牴>0Pi\WIˏ'cgP3iX@ӽZe Iݺsñy8d+Ike8cMrew?ш *ɅA`|u8֢ &7Ws|0i DOj fjʯC KTu^ˣ18 xw;uo|!fV~<&}@K*%<!߱s-M44 Ϲ~F/ҐAֻz6 #Yg貌#Ր?+3B%?z7~{<@*?Nx>\vjehJ&2gJPӛ0>ғ8QvF{#+W;8W޿yO<@G61m3kaVpdL\ZlZ|.kCw~~-a]Qot8+'Ӱֈ;8$sΓfVƣg/72ybsEfYt(q{ӇU[?%s3>~KCbwmo<#2O_ UIW3}O/+OU@INmym*t\F 6Y%LčJNYm#ωOosyam5DHsqE-]l7K[/*AkEcW^ȊgBjsg$_* ֻYLJ&:$Y- -ׯv__} E%bե'€>aO'.edMgq#x6gT\$*^9p|>_^ލ OcƦ/}ȗTORP2q> Lw7~5P_wCU8656V XQ(U{|W ruJTx ҕP]+H3p|yZ6/TzjAzFF[:JuAV|YUxxuddSrI:t8[4yY[/t:&J:D`47k7^nBgኳpuxY1pO Icƴp޻ea"" Xfg6_&SkɐWܛQ{1;_*tdHR[f1KKe0 J^*ǝsMyRӭzdJ*Q&Lpz<"K2vxf@[ p&҅S +l>xv g:5_wmP ´֋1۲V9c[e}KLEʄ8 eedQ`uFqq2vVL̈FB*'zȘN4uGz vYan,Y$RL,Tι4UWyQ[Q ֩ ps8H gPX#"ie\ZzJm4+ywYƽN!#+2Bzd~4e]J+{ZIeKA-PnZYlY*A˸6(%O ״02Jۙ5HDLЉfAFs ( 0#/;I 403c[aGTQOA'I WD ƵO/QrHjwmqiP@1ڞA˾4NUd( \} Clh.QzƶdLˀ5#7ݬMp[Z]1 ա!?HZg} jO˜[bi8iH'ЄjK`\$̲e&IڎUBuDDi? rXmb]GdTº FX[g(ijW4"*Y`[2&/SMMЩRޡՀV-5}" -j=p9H>6@JƔ®>6R df )\ԑ4CHYދBzUՕ9e oI̲ i8O;k@G@VNOfZ$嵡M]4k0^]^e rR )%NA&͜C k(6XCrpLezY= dM%7I_<2^ }S݇F%`JJաb3mhU3E0d\AhYFCHJ,wp\c+$sNXYm 0Ah&Ӿw{?e ec%U$fy!A>c/iCN@lk6M^eU!>cHA:xp$H[~v!OAgK`tvvJ޳[NO?$;0إwd-`֐V zԎAjq nTtZ`SI){7G9$BqdU&<) L9 %wd$7-̀:%MLF'֒hs L!š[#cb;-3mVY'[ a׮&XZVm^ڭJd&*tI?I} ҥ7N̙#a AO+yɨLY>SFvyct]>>]fԆib d a\mB ]@ X(Rh3Ts,AÕӢ*"{19ZUxHa V2gSg K(dpx]gy "]',jFbcO*Ь&C~ױ=_UNoe&,^^<_wwm/ ɴ M7" ֏Y{!eZQߖ`HԔ^nu%/RǑr-s3Oxq_b|w{M;4 _,;d(m(vز-?u'x^׋\vgAOGZu#_6T,BgxrKP9uү7z?ﮖto/[s2F_t5muos5EH posV*(z[ =8cr>u7]~zx_㫟˺~qEni͌n^_~k\dcN@l˔m˔}&c2%Eə3S}vMW(A֣ٝ)4g5jVP4>L 2b {GI~[p{@QCOUʂlFC C0gwL6GJFrYFB:#>;T^' Cs5Jjf}ե^ _W ٭gYJNXG(\6L$!H|#D* ."ױj3?%bzz&z hw6Ndl>[4ڠeN "SQڈN90R*fj)CJѪ|}|Eq5B$ޒ܄U5bs_yw?EaP|+Rthc#^]~e.-"nqw6[)<~XTNci|](eɞ_kCdF҆i7VғSRD#I'.M23nO PbP#:Ng<[DkhLgoqni7LvkŠFtZ1D°ٴ[Dk2%VLݶQx^dΖ6J +]rGOrc:ʉLi:t>ɖ [FOk#H'.M2ul匇-ft [+5!tFΦݚ'ZƐO\D[d Ҷ,fZ1w n' ƒ7Yv ,Τ dzd26)l2;1" sM|Y`33hC @x/nwoqݽ Q֡f>gL5N5VҾ~FΤ w>;^vFԁ!Af'!9wQ:TlBLjk3ĵ3#:!@]*|.f}C&O|e'4p{њEH5M 3g[uxI-e|Da X)l񟋟~ƣf9u{=W1F?zatAE>!ԁ09êĄexzUY-QxG/(*#}Upmފ9h3jj]p\[9>=ud>Ii dzs'`jYad!!ngr{fRh,lV`|RgfW-j+g/T9">y3KQHM!oߐݔ7~i# 7(ae^7M^ C KHph (]jҁAެQ\(Lfx)ɔH):h͝U:yD@^&k~S ; |& @ h9X RI,T&&Y)tf#17PMj_꓆ ւvJckm[Ќ3Z_41W8<,s#>j uqA:55zo@IpTsT޺4bNUƚ3LfTǑ 7tv(O?,5<2*K92*KF Ē^QYOc! )!h8\81pc)Mq&N俻`g-쭒晜L1 ٜ6eͳ9ˀq&'x?mlDkm/c%&~FqLi? d)N>Rj@ n~.P/KDݪ};_?G_ţx,O6h=uw p>M-{RZRx0PLP;C/^_z~y |._d-ݢ ^ .d+;FtVhiMV)p,ݼԌ \Ya{wJd3:**yk2^R" c.wAwh]t{ޱCJǭ>ǭv9krp7;Cpu['" : ;8~'K^|瞼% -2﾿ٝ -Udh)G3mZjggd!ҪW*g|.d%$r&Ż V5Ƿ2 Uړ]/k n|SO:<-iLw~H=҃M:%i#lzC =8cȑ >˭LOeezg ҝ7+1{m%QB7Ɏ+23 _qRϣ5dcOYUXE#RčZNpMfkM'WU "=VUl\]1.\r4t*pq:n8 QvJpAiw$E{?ʙFp #(16Õ*uG´Ni>3-ywu{hx,bL,|< l_iO OZP9&>m*7(|lpm?4q ?n(i3)O cy&o,q.dE^O ?] 7oCW>bG ]f   sIAr%À.>JHvZJKjB9-K,&?_~\Nwʾv*-ue>_F a'h}9_9pw6yf%HP\u= M9~jpb!ұ8UeohWnfٚR>?~%| )ZXߌq3HJy aSa{:͂C -,o=<\?n p ݕv\ UHc6حk8.խ+䙛S;z9UښSEL",#(p&1ZW[k5^xo6 ?Ә/co.c*8+AT0caqc %ui.5夤|$TV5@JRDs~*t1{LijPĔL iNRf7+i5 `T( I Pd4Ĉ)`XKn]] T9Y-LYScrW*NBЊ! J0lVjq*NPШ ta*)Q5Cg,4i]K2-\Md)Ijk'@-q1[Rb4 )zXޓ{|[%9Z>9I) 0MbTla$kMy %F*i%7ch5iLn(n1))/a1-zk~?`zmȽ^؇QXwS@ޮ_,d NKr7󯆏q~g"E_7| ly`#ş B?ܔ+:;w+=}y0ۓ_#_<䊁Q/So_qKa'JPULbJH PMA?X qY!ڒ CpOITII/0t?jD |YO*Z+(y?ՠD6H%՚}_#RQN#aT1%jSv3+@/X]$^J$4^V=~grjM'Ii8bMEWc&~!^Vn:}bzoAɺɛXX ]>@lv}T>v1\ r{f_/׳h|SIG x6Iڌ&ޜkCvQdׅ ~1=>9S7^^uP77S$8: aimo_vNu`H>7o`KN6X|9vmF7_nC0ZٺEaT hG1gm(6BՃ )L9 ޴ў[]ƐuЁb̲W #Ddeog09,vcٛDUDg6QL̅r{\sBӗXF eHyP#UOΌRH)pV{(jtdߖI(exmf?G#rtĴ,Dk MC (Za!n ye xv})Ak)@}=(|LokGyV]@Sm$?Yc4|We±3w6 su|)k_7O/20qF>^ƥd}ݎwww!f[>c[DL1r./e`jėI\cGXi@Fޓz:V`I=Q,16"fhʒ&` 2]8F–tݰeԺ6N8{MLDPSw 2FSuIoZɇn -|oOF\1#Yᶀm]'¨ڝ#Q1~v:&#N9. 
G0ƾ$捎u?l2~&H*-lݧEWTWъrUpWU kTԪ Q+Wts?|0p,]{ݤl㩸a3^7M^'ξre׫/D,dTb[-gh9'q2c'7yѲ~uʈE-/0YOIZˢCݼzi4RxB"I‡ <C!W ThB$Mx7UK="WSw /5l]88v62a~&pGs5^Ni.9.h na{>58W 3V1v{iN YF+: ~Q(JMifr 覦~|E0Fac !yF"1i͆^C{b WUM NmY%E]Vi.k>Iȋn$.1뗙` vqrB ǘ5=-?-`FC*_[g Sl7.}+$ D Qׅy hEz=>â7ϖ.t`}6Z Ν}VSLcva0gA'nlxVmS_6;K _*ɳKA1 >f"r%Ҧ`T l.+J ))BN*]˚p.yMqִ (Ni(*fZ4d;r$6 neE’`#%O1,-T^.jо8&BX(&s4EjX&?,>b}ox|X1K,~PO͵q?] R2oa^W>ʇy]0f fz}WP"Y!.)h@ıþF@@RZ),}|/@^L.SnMR_0@ bk zarm, h5t[H1D0t;NHUX#wa_$Pf_< es}{=lȭu)Ζ%ui `"!8kZ[%VLQs-Q`kr=κ7(ũ[M-R;lGrLM)+ʲRvz䴵1 _s )5N$0[ߤH@(,uj&զ(%#"J`,L%Ja5LUe%+R;„#HUץ%ީ7UI͉$J܀|nRnN^D'$aH n;^LJGS[;a@jf%eLxUh-9a1^+j`ņn97 #5H.34MqDEmV&j!'!p @3,MiC䵃V`I:ʍAFQ#0Šl( oo^L ,s}o[@?L(E.WTJ,1=! #D)tU뒔U͕a%c\K$x#cȮ,#fV5S<_t6xS54-zE)#,kX;s|9fl=BBΨU$ѕDU$B5ÕK-T/l=k./vy(-OI匷_.T4#KP54GhRbMR f/!' TƨN[LjJZqՔHvgWH.M;ڥ oZYAإJ0!ca' aq +==zOR09Cw1_QQ )!;4 tQ{dP!!_1>X31q}d%.erqP/bMA;@%H-9UHGBf0{(ؚ՞<Ӕ%L={_}I&'Xl+ٽZmݍ+}nzao?Ճ{ 7/^*o׫_+Wcwe4S/s}k+mn#7/[dd:ߗ]'\w%)HeJKR7WנDiHC 2]^-i@0xnêtjۻZBBA"1|4\}{v <}+ɬL= EyHN'bwe E9"^[@]yI"mז#uv ,4sjWC)ŮL~3ƥX3as۷ZΝ jވ ka,[2Ң6^ hhKwѽ˸8U/ [9,*lM02B¶ k'Ix؅u)v¥n* nQ𲡍#GSBhW&% sr[SJgٍ^HtFZqgx!$ʘNf ȭLv؍FZ5\**:v[PLg' Mx4KC4U];}0PY2 e0]F;_nxI &T9EUlhceX4m;cRߞ&PT+Q:N0M/r1ˎ! BZv>IZ+#&w*"8GFJI2fƤgռWWy{3v"dIƀvIfRўA< Ӣ #4X/uõ Ƌ\adЅdHX>Zc} "{ہ;"+N9!imvmq&N.3CR353O̠mLɧeG:#}Fbhݷq/zl"?gcG?2L.К.uo?NNy Gk5^#/y^TKt[u 85NRXI[3Rn\bIlhHa9mn5cC>WӨvkrtx=`ǭ j>o {0YIB9~K֑QBwO.6ANvV׆ 5Z04ZS 'wd|fȖQpCdN SA4*)KI ҅ŏm8ڵ_c0W@nP611s$W32tT5nsu.jB WJzleA-ˊP(J&B`^Q Jm)=|/ʂ4Ls&Nx2#W:faz0ȉev4[$.'87JNq "0W@<87ŨT/v/͢wd4Wᷱ(ʇ.Lg!1I^`|Əoy uq5DB <8Ҕbo) RJt%w?F&RAQnCF??ԮD tUZ.+/eLE,=cJ--5}h bWVt$<酅x-h!G n!uU޸JKU,KAectvXVB5 =[odm(Ks_lKHH!TK&77MrOc%dB;_|`Fw}%}x$Jn!t{:XGj4yfGhHb*q†+NN[6}vaXCHiiBӱЈ&I&`g?I\mhu>-ziˉ}ňFLSvp9cG \JD4B 7̞o P ҕ2yS$ó~-PrJVZJkO+*hu$H%e5i}<)Ra2'0R˝αe9# q?\Ճ:OlTcmM8LaHr =V\J)1g,NA{əG @NTKNBA |Zs lOy6.NXF\6|Yz#9^7*Xʈg BQj'Vq ༃j2)a)Pn gA)L])7V.e4t n'P,a$(^ZM^5pAr8MN,u' ]sW6~Yr#.p1 8ji+#@ Ajkd#Riq8Td9 -8 XYS5R$MđҜ^9B;p(Ln39.E$]0@bC.+H&HA 0nxETˡFʣJU0{-P데h\UBEr"!@hEr5h9NU!4#5sGs8t2Ks!pˣ d BUZ@u /++pae&c:0 0i)uPY)"8Tʊ<>P@  Ue`,g0)xX™Ȼ$ Jm6Mx+g8A(H%+el~z(լl2pŕK+n%LZS*[r";Mũk #+V )pU}ygkσHt0Qypȓ96?޳Tr>>MWG\}([|0UVH_d:{d姻zb{DȻwtG%;7}(:"+ )GD0)<:^:fMRRG:Es qzb"hHj؊P.YǹPF"=~F_Vvr:OU7 h:PqT:vЯq;4 lt2 `t@nC]O:! i)0w-2Q~{0$ ^H';Ӟ)UcoDߏkr>.bG4;su[W{*/*]',t< f%NbC PIiomdN)cB3[W%yd[_xMdA0 iQDH˽1]\Lgŭ]QWD_zo.:ͯnFia$F'8HʾF}avRcwvc[6r@{Z9RWuzGC.@tŷl4tm 59{fT7ki=HkÈduK_gBJ][9|/%;Hr㽔f6y|3F8JYPu)%ttYDQ%5vYe<F~j ocs[$La\_bt[ 7~֓OƸMoo_K+O.Ѽ 4,°c!m3\.gwoRp?2?޼Wz4'-#&12_-p73@C@ue6j n($BP(C^}h.YB$r!$VYunâέu23 P.Ed<&Y:IIxukCep^o ejReEɌVZiCr.܆r6]d\m 9Cn#̃3@n;jW2ѯ +4!P;37{_k|^:z=/hӞV޹L-a5U-܊JX{nԤKԼDS|ױm'M(%hk@= Y!n-(m"}$n;O3 -!|Knןhȏw0yhg/z1tqN@9?k$8t}wq5~x o4zx_wdJp1io)Lzxj"Yamqӽblc UBS2O><׋?BiDƉiqU8IyG8I{<8A)z[ ׁNz<h-yn ) m<ݻk^ JEU&3tkz1i g|wCv`4X ASfF2xk>/}7}w3]ݻΙ"ێrFQ_%_uk6њݻa;pi沫,vFSu]F(*J߶]!б}ۻ}%!PUۈ<4(mmcWݻJ2 #l{ ᾳ&.~:.!uST*Ǚp֗4юAk2H)v;K%0yBu::bI{KKzұ>2t[J4:BFg0cp>8w =de{-FRL {잱1]] k)ae;Bn[\׍V;w2Qցʍ@'1e( ]H1GߚSk/ z8A̵_fp5\tGϠv3'04F'3*#"hHL:a]3 mw)FA, ꦮ ,X*КjᏤrCJӷ^ Xߑf: 1dF i*q6uJX|;X\ 0Hh0Bj4jN= F D[視4pGQ!9,𪏺IPF9 Ac0lz/5h\Ul܈cC1≞U4hgNQ{ERce!4kA/8:MAn@:^{a)Qk*gQZǛ萘KCb :0 D'oWPi`gH}fv:׮]|u9~ܴf@Θ*Wx+#7G?Hxu⊓=?D&EuE!]~~!+ 6ean[Buc]3ji立jIo]gp8[X==3 gO3N>ykM6& Dvhg~@(ǫжCQw;];dN[/"UdMɓi.^k}zzG[ju``#a{uOr ^mS8V8xN8 PN" $hMZMTkp98fOQ  Gۖ) ꨇ1E *~lR d>{-?_#.\Ǯ݋BCԈՒגǧG'[u@J8BبJ1p ͱxS;%\ecZPe%T z nizV ol(qALkauj54PAZ Tje`.DR4Jp` q\Y0njL!*Nq`^=Ӳh F*^.8(..I Wc] GψЌne窭}=dh6J۲6(]eKݻAVWU1t*!"h?{ OkxI'Z]ZWWhmmg~a_g ikYz)n.2OqfWh~'JEPԳ8RTg;eHaw{o4p+G\eY?ɧ'`qҮI784prS7qBDƣwUIk-fDZzgm'ѲvEj3:-nf7@!Gdz*|| 05=9WXVZTwc2O߯]C.q9\X/.v@ۛcZS9? 
s+9~ "wqY扳-~v_A;=X1X9y}x#prw`Jp݇I08yH1!o(-'Ot zZc[6}W6I7|5YSE!j4(],]aULf(&p)`lݪ}wP.{2{[>ʸg&SpAܑh^9xf͗W7nw?Gy#r>HIҳ7{8;}.wȰmJ?1n~: "w7eS* od9cϨ (d[9(cpFPu(2jcXM 4hfjkcCT&0DnqFQp!ڈ^ ]jQ_fMtpQ}Mм,K'rrvHvT }`THH0{S,5/\Κm0 TQہ|h^=.ؒ=W2t÷W[E7򝵦әHf]~D͆o=QAo$R50a#-g> B0*&DՕ Ԓ_j7HJL'IQE/<90wILeXt$MtG799@mH^{LH>f#=SVxQ7꓈HHG͝cˆ#F 8%]QyxIศ#4F__ڏk>R ?=ܡ|^=d Z]ۤ2bZl$oӭc^Oh)˜:QpË-DǠgv}dD𞊁b-pp_qHE`UZk s\nX][eiRb= ې#KNVRgϗkq"Kv()w$wUyUosoNp>mn]kyL%Dўn5=^fddWeњ~G񼞖uN)s(iNx?%e!)nf)n'Jr8冐-tON6R]Z]OV ⧍Lctp[ `WC~6Cy4M4tYTIVI:̜ 睢,B,nn JڀMf1RΦبCv ,Ʌ8]p)g]Vw&O]յ5 %8lpZ;ςRmMtZ8嫝x8Lj:KB556PkEEjXUYTE2iv)/bd<'Gp411IASn?}N7e!N+j+nHİʃMX7eo$7)fJ#ANNҤ#7Y5&iʬ(g(IrvRMbB`x=T՗(8Q8s (niQ'yAEiYN;Kn)4}7wEro7q;c{ œ}#sF6(h0PPMEu'օ0]c"dEO;QkaDмjd S# ɈV\G@.QәD͉(A D̒TXF-vjw}~YlO? P/,zwA݉ ~Y|/2@r6TX9X܄¤_Njמx 1Rut#"h YF=H,KՐɐ m>S6bަTp$?NT?N~̈]umFymF8[Qf02X2Fn8 K8 =*f5aTkP_NTKԝsFLsԄVF[Ine$` K^2J@sJB$z)X+!qg42N3Z7e RBI*11hPS](4lK5 [扑0zRrQjgH$@= YLܨP+Q*'4XBb; qU@T{+pU Su\E !Ք\T*QVE, 7"GJjkউ& Zҗۙ`qġ(PP;6E-Yj3R>J1:" .K3*tV4G;k&wG9-+󞷅sQŽgʤij7>uh!VN4q B#CnxeZ!@ J۪pLj(aTWFFBٔHiS4,PZ|SɔM2( m1h%x\}CD%"g_>/4aG; o;;_em~n )>/pr#iDIEm"`ُу>;hx>Rk%l?.spcf Zh]ӾA!)jpp**J JRy|pā$ Xɪ˜' D~s]:\7[1+ }—s~ڻ0d#g ӾAu~p?@nЫ{1H1;Ѯ_rTU3>meOLnZ̗6Z<ݠ]@\' ՝r<}U ֦B6]/A>I.[*-Jnۃ{߿γ*-F#/א+D#^B,^-p*"Vr܊R ︇u:^G2?M>AejvO֠ᒚ)Z_ؗs߳Ss"^Ғ"@_OC3t@tiʖkΩ5 Sg[=^Jǝk= ܈CMYL[|>,8q7LrY_.d7hjqE8 ?oZ-f=6Sqff$fdTS;~)қFF̗ oTDbQ20ӟ.B邶D7vi%sr}h5װ|PkF>CIy*:zzmd]QftV>å;Z6aU.wGÓGy^!x}$>7W ʥvG~Ry~mzFJi(v4I( xc$ 9A>j5+`B82F&ÐmDZP)|T~Γ#mh8pA584d#w)lVީ]uN5(TbNӥ;*A'[(@pikMґXd2p"u2T;y*Y0gYy*ݘ9} On`UUBҧ֊qB_sp 4:zEp)Ne N*qZ\l( ({e\eKh\&\&w wP.P1#thE@/zcFZ Qx:xT)-Snռr ;Uᢃ#AWܠ1ܲYL '(fAPIVVGѫƵdǭvYߣW2<܇|( 0j;(l_sZSo'fJ=-B:\t@@i}tq֊(vN=ޫz}H4|7,}?:||maU,P\AE^& )|U(z{+(x Tw|GBDZݶu8uf<`𵢳1]6|{{si·-]?@_I$UbmtŶ,%Q]PtCR>zbv%=?:fS&>ft+a˥l ۹&q}Ϙ%]`_g_9!yu!g1rG<$o{-M8q&Yi;zw&@4>^D]2G!WݕSPR &Py iKw$y>!wr(.=/[ظ1l4B]G T eJ.N T$a/IX0^PoSm=^\8zMKUm-%hr#N)ՊtNSK2=BWOw/{(Zl}ԃwHm]zkc1Rm$ifwa If01&0f,iDʎ &uP%!$\ZuE%_=9$91XaFҭw|1((1m9ɟi_̼2|jf w+2LHtٺ*y: iOFQՏ,7h 0Fa5tV썢q EF1i FF-ZL~#eon(Ζ PRE;ae*Ljco*&)u\=\&=Z&#S%ur KW*t| \ozn_Y*XBPm=_ 'q$q 88T@ ZcB\#`FƔb^*\npE3&lѬw\Ju N;ahީVvU*'yVJɑb. i3hIdڣE6/\|׃b*oLh}W]ʱq Lf#:i}.Sf( b\0R.fӾT4Q9mΕkWrЁ3YPti L"٬n_*(Ei@D9ak ׯw2YOd27k,ćb+afFhv/̢'>jϲ;}mEWW>hӋ(ߙQ]W/ۨn%oC3/h(ĝ; 4xn'w5>2Sn|*91 %QMv?L$(z! XI+2~-V2]1%,܍*I9IGU!"H&û`e S؜sF\ FBPM2!b*Wzf\=D"f Dv}˝V٥"l{mƥr.6pC&i5K@ ##plcËB' :ϚgݳA4t&mH&"YI6 p9ySUEEBg}Wșp߄ ;hls!4ozvv4[^ƩޘG> t)P%ch2LJ<<9> f[kdbٟN>;hY%H*ZHGǀ-2Py q :U9m[lXA@9#\p>_5xP`spc\^[6˱!0og2a[~fs9%m g:6w4&4G³(;Rm'YU⃎? O]FMp#"DpZz1fOjcP[i,>uҸv.5~(_s"b..qyšDe*/0`L.(AG+⁩d#!͕"ҾFDrRF̷SԠSgn7ZQK}}Iuw%)8D쩠UA(!<(E1f"̠_A _d#aCA+kqtD#F1 `|q1F~|<|"_98䳱ڜ;=[CAauAibd ~8P|8,`-}4 , '5h-py:Y<61dO~`W  fxOz:{ޝgJ!Bś̏Ӳ 3. ,1RZ(A - c8 Mg$8J/=P2>XoWr\_¬L N'iFTyb,%X=2.iKbO- 8#+W!ʥ6OC (6ڴRU@yPX-f- Y篑Uk{8ùMeg n^^S-5"wfbWVmy*azZ!" 
asxY_3oUqkqȐ#᭡$5"~W+;I6Y,$dM QGr,BKػS8aoZu2Ũ)aoFDR%l{D6 3 r.IކPX yYW2pVX?4 B'lUorG'o 1̈́N1G fQlI(۽3Нڠ$R(Jw0Ӝὶt;px;q</~ֱ6ݮ-//{^|nagD%o샙qdE&2g8|Qd2f֯z%)*7l @vkdoK3N 8xɴ#2@N^'O}q01a j*qA.ތeOkX ADùLnPb450Am<bJ}H0t=1R0XP=oXin&y W  '|ׁ" {- &/2;T;zvڎvM%(Yk5cUn f,3.qA rA.و C-_uKB1-}Ef>U@fF` Ðϲ('YLJ\J % 2?%gIl2h\3!@>+&2R[`-]Rݲ]KWڛ}ӎl[ Jem..8 l[-Ep./< >z+~-XRDF2)kHJUlli_ R1 վk@29\*'se̸v3v QW]L;1|/ң{=r޶=T!s]J1 %uW1)Oi(FçVMeEޑ=L ԴpJݫ9= DM)@'#3:>'I#tGO%bqn~ wY&BP[CNge1_R<0']x3zIM[lJ;T;2 Qqq0iӘ_ A3:È\%}9ie5ZQ &,1ɽajqց;Ŏ}Bd>YecD uoQJlXbOe=  5(d$%$DDHV3Bc -X|8.:f#pr|n@]߈Y6U7N0M"yY  meID^/(9k"-+|g4Y[s%M_&O*=qIOn%%}B7@`ǐʵ>&ֱ2D e-'/[==w@5ppҖ6C"vyz>S=-@C@&|[wζsgע AfWb 4 E;ZͲ~Z$ eyuqwgòLH~0{lIמyo&be3.N}|HɨhS(#YrٮВdٔu) YӨ'˨넌RdYG6>M_)1XCPs W>+kM`}CEڪflݎ+"g;U#0"BB{jAjQ'+DmcDIIBll>"8('Q^13AA<Ä`^i4G@ʈ3 h * МoA8<%@qz}nyCW XcJkp~@rBE| 1߻3TD 陭7tZCJ9mťHKn>9$.VPvv)TO^~ͷ@izPGoK;O .x!$S\*//$LO<^yPIDx}B%.,"-g&%f^4"l D3dpɡƽ/wId&mI`bGglse3輲۴¾If>D4M]R,}c2K0=Ct?Dr1ਰmKj<ߝmA546ݧkW"~獭yfy+%DI˚|jpg$/5뤮F__n9Gt~M~kבSF?)cg/F_(6O2j}Iwo߾w?||77֗z~ɴ~;Ot۫wΏo~crwW\|?><_ɰ]N?Ñ)ooû06V᝭zr}O%--9,Upۑ]x<)wƷ]S)a%'EF{hjIe':4צMD὞=;cEɪb a֞ˑFʬ/ vB)ܽӣO f2L(4kaґbvdLw)$|3g-6V6c9+욢(ik&ǡƽh seV_jl3fl=D dN7wH5hbՙ56 zTG= ɛk1xNgN\ zz%u`DՑRqeغa·W^f1a$㛷aо/-~}2H] _~nOU?FNa7T`&y4'b TzwuQV Sv;×({`}H=k?&dLɤm?}H73pmk7ij>߿ٗak?v**n ҙ a1XwMpUu d 0l 䕬#ޟq W!׼!簕v5ٮCn =xKgӋfJnև+7dm& U \>?G+M<9OF%eV}+SԷ:Je&XꭵX Ic ,(&2%`.J8`G!MiVRcE;9/V::|1Lq\ wC@,$D`J`zf9#xcN:UO2IgAqUjy1QheZPAJ31#!l%+"(=Ǒ-\i =G. B\B+n @Q1<0Lr HAdZ Pԟ(b0' ; \`fu$pѶBNCΚ[snPL3E4T QZblGFfrp'h"XGXڷ S<M0vQPj-ڨ%!1oUƆ6v-\ RTwvqN^%.7Mkif7yRHìdX(@,bE IQ>Ldk#9 D2m8b NQF3H0hc:F+ܔ")ErSU7%k'4[(P0cyq;"OX0da'0 OxsN;앥ڨ XH,>^9N8DiF|y߹/)RȔ˨/3g2o?%=GRS ߒW?UNYƠ"7Pm/}Aht]Fٟ:70gŊYxuϨs D[gq,ϥHR3_n:}˄O AllOs0l~O]iauKR7|k~">/8̯t:~~#u/$Qh҂)ڍS:_51Z%CV˕Ƒ!'9n<1an_ ^2[oio~5xOP\qh`J$jc(84$7q2 gAVuLH+47Q RKD,A n;kcؾIRXc`OoQDm 3Pd t5}ۂ؊{e#85vitJ p8W[e%Z&Eڣ, (b+wy[;T%U%XpDT\u#ƥM)׸NU+ TU Jq^`<&\TU>[Rp1#n.ntK~&L(03%Ĩ9 |bC:EdeJq ˸l ^簞91bG8ob}h} qyV' X$@Au1]Yij`]: ۿcMa^Zh(K )o)!l R.-kaj 2g-1ae]/W" 77 >Wtִ/G/y9y*F^~:SͳζY}ŧ'm`9A9a^kjXP\D$HRd[pH#A#–nD=gzeWV.'/r qzG)NX6&@h 2!JO52 k&6RF,"hjsN4o Pa )iD)V ]NwF)+&rsO,XT#H$" Z @r:D P2ָ SWN,zp^u.&R#}:tS\(W<)@`嶯Ǔx*D0YReb_c&D[Բ,ĤDzTFf%TZ*k. Yg dK%q,6YGfPAIPIk[S[4qW 8 6V[ ;{d ZJ?/p1K Ώpс[좔@8ŢVT.B_\~ M=)6re >3mfoջo?l8n6-/\ۻriAZq}HMw`ݚ;ZRi۾J[Zg4C)&*@TQ#7*1k#$ ʪ>jBkCFURyߪ_]}r;5&Z{wSƍqSmsSN 5&ʭ ǝ{z"X6  '`4: ES!| ]^JV>>cTa1.jm~/mkh5NHH2IHR]@͚6GUo־^Qຖa蝹  'zT;a_tѹy]ohP! 
W'0 ƧJG *NY";U)5"FBmj5@Ml:j4y!iwݷ!l}ׇ>Ah7Œ䫻?m73DۮiR0sΰɊ@H!w4!Ϻ|{u}ަNuV>7/MU-JavR&8KZrue˶i"bԓ$\߆e@js|/p|>p!LgcIӻ)*ևby׀D/ mEMZaobtPi)Ҋclæ_CJ@h3\˖ю 1{A_D YJQŇV-gM?]JϾTiA9)iwhRPz=hJbq,a)pКo-ՎF[^NjrX n7)L:<|Jˀ|YߞtxTU8 ybcX{+K᧳<\3 %:_Q/UiHݪ%BHwem%?686{7F 0FL%`guDU=DޱЪ"kz/U@Qш<V&ײl4 Rn Q(L:ecK:" %gbgϊSPbzW>]0р;q| oN GM3|W"0;q$ybUjۤ"-G8A`[x԰5ù'Yns^m<:?IǠ;\Ãrb4jӀ:離ivRky t i=ǗS]`t~46f6kD!j$!BÅH!NZXfk X?#h-Ǩ߸`$c=j{քR#>& Sc 攏k*r}qbHi4 wbGgd FLSu5 cV]ѢEo B~i^WO.":D(b/Hx)ˮF 9lnO8+Ol~۠zG5GӾ^Ycƅ}C8Z=`(`:pQf-a?`K > c PKy]Tё0㕣,2QR賿\HtE%s>,~yk0h3`jktF(,= 2.n0.sMH8xGE;5w-h^"gu5k-xS|{7Tao)2smj̆jZUr ->!eöY, ~6K}5?[?6<;|qDJ!pkA /{=gvAC#Ajlᖀ Z\irB)gJ)4}F6A$Oi7@RJ簞wR!Dhuac6:ʩ# nRbX`@rVT.[Q4$`WTΤN"@Tu9}i zZX Ic ,(&2ǖP > i: )PSWt-cz۝tS _ڄv:lgc( Xmۨ GΉ)L WI\눬NNt0 T r^knob[@kZw8&0a\۫̅ l?j!`x_3of+f{t}k/ ؕk~an gw/%{gŔ&'r7ITowmm,9 W%e;/NJ&A8ӳ%.%QX|ӗ8d/s< CPQ590#zWn{խ3;wcM}ЉTToб'\ F&YFH1O>c8tP(BnYx٫8YQܵeחoҨt,VBz`0Bx=aRX1"8(ygjNA5SK"(a>l36>PB(ƺe,0AJ%^kBih.~rv`~ۮX9{j7BwEeMh.C[Y EY:BWNqFW crb;i!:&SL,2өM}r6/*.x^(M7z;-cMKNڷ]?*˲n1F .ԋ1iNzBg5JbIyT7"4\tFƛ"3O@+%;0#HJJvZ9iq˝NrS~xx7.#͹dM҂HS T|BRЁ.꘮j݇o0L N/m a(wFJbY Y$`ds zayCW22?1wp0;` d=q=Ϊ4gOe.?x\=wqӤ}BrP~6n!DDH1G~}}4z;34>ww3ESǍO2[@Lqeyxx`j߻apcyoh(hPưD{_}? ݕrJRl ͞F -M|_.R2B&,!\p(,'Y9xA|0;7QQuD1ԳhTD,C:SG %`ǣ2 PLe%9e4:$!,b`;PσUȹ ^ՃwN"zs6|`?Axt 6y nĔcVB)`0 *<&YY0!Ca=x.A"x共7bx3\j rmqfdk(9`m!Z1] |XeIIBr9t=OqQ!tQ~g|ì*]†J `xgJ%dx 늡PyN@&"Pb|&_zw *&5zx#&/Gj-lѕ,, ((e bDC/X47wԇ42' *p.T0Q p >h@}ӹ}*/Z74?CkJu9H>̓‡d[SV5Cs=3r,3niBHS;!ꕛeàDf‚QIMROWi*m_Mlλx #BD@ :aI)Oijb0a4+Ar}Es@{ (LF[S(&;{/aQ1 |헿Zp@71yt*svbOo*dҠ e^(} U9Y{-<%W >&*|u¹ c-[jP6/_dۂ ֑řn MsVr Ywt+/ZaGNJn88(F8Z;);GD@h@$+,ea@#d t2#Ǡ  Gn3)Ԟ,ȒH,Dν#]P$Q3ʤ $z5LZ1,qLD+*ցac0,Җ!HғEʃ3&˷l25=.NWrVAF+Xc ٕ}5jp🆷v2ja֡S23}sRRѼ"*yu'OYpԶv mMk}8ӳQ1 #8xAք%^Y1tP{h^yܦ8;.[nvwuT#r-6ꈜyYDǧwɁz]Z`H]s@9yG!gdsE~ˇLo@'x7 |fm~Q9LH&*Efl^ZKE,5+ݻXAþ= w8R㽇R=e=˞WmfrhQmIhܭ5Ԁ:T/z(tc<](]S+oGxvl\E!x0^㼅8>Bd!jB|t|~/?$Ҫ0(8Q:'ʩq8VZ5P ѨD0]krZv#K|i]~s{ûɔٽ0} Rڳ{0EcJ)#53siRG7㕈 ,rƀ= :(O: U.`iCkϮ8G\/N1UZ sJB 0 qu!=9M׊֡rtU32Hc.4 BC{ŔցZkuRQ4 &PtA`,%]CXSY"_8$^6+gZ;05.#U8j`n]Ld" VF_]5R[!5&ZD0,30BO^,[pܠ䌾Xd >{{=|sw=#0~' F/=L ٝsg B %:sӛeap_QPgG FT=>S4&vhI33 /7z@hSo8n~E5DG1]9dm(w"S+[&Ç?|(2G>U-X'D-|Zs'!z zU gX/Τ!,qqydc31ިRw ģwPnIHRZ)5x*| |%]݂-E܁% +Tdn1H&HX"^ilP*唁R@(B 4>R|yރL_ol?,Ȑ~ތsW܂7 //ڹ61 L޳L^݆21~ .`eVʽ b+sY<!jcsXdʎWZM'Q'_Ǔ=Oa%,&@>M78U)g+iG㼹^ٲʍ+sh59-_ѥz= gz7_rov? ڝ @v#%,ښ+ˎww{Jej")1I:, TP&Bg:;Upz!vfbqJc'e>Fh=7.`0=TQ5!pUW]JV\ ??@@*]?"Na=&{?\|@g&ҁ1uTAǩyfW5tpW/>|F˟BBsV" ET*B!JWLu~o-}ӯd0s2JHD`2Ȧ"YRiu,;eKw5FAlm,{GJ"w,ZJGH#qhlX817lQaоj<55KݭQ%c4(xUأJHi R~CSj7tEWW~- aኮ1AX/;:ON+BȪxR@gb`/9WnJ_) )AsjNeHైPBugD. ͮBEAsy"rr& Jq%\^,؆MDA+5vRއ'PǗZ\npV":|Ed0r \*'"$.b%1 G2QƣٱfH yVR_Ɂvzf-;G_,| cMGSY}nCZMx`Łi[JM}k+½{yF+EȾ.{V{+PYkOn-WoW%$ZPQ+"N.=gLn9o&4#D^俓! m !%PKM9 ?0 |JYEtoE16SgY/)E!cmբyxbH_§|[;x3Q1g>5:i/9sAMnC6f&` YQbQF\m { Q!2ǝ'ᮢ] Tmͭc-ZmY{B\mnS<ҲytnQ9`TfDoG> z0+vZLs iVuByDb۽q5J0?hguvo2aP]Ԏv(8h 7P?'/ZdΆ+R^z[r:`LR؀5ߞL^I8 NWTNLJ T^h)ȎmZK~c95Bfk)*gV;ywl#,TS}p$ iuQzQ{=gV΁6nj;KdB@eZ1 C.xun2B05 2aM /JgJy|?wRLҝG'g*It GC9`)fuV%q /$}3LF`{^ ^uIgsp k>G w3< O䆓_DT"Y3I$ L!w3 pr<93m&0vQR3L"lGd'Z4SvB踲ɞ-;, &Ed_@GF*.\͋pa>$37q=ԏG3~Rp. NŞ}?_?g¡KETS6`M\njYb,eT:_KR*4aR]"`;+Rj29\ ?R)օSLj6ˆV -R=MD/G*! tA "3s~p g}Oށ%q @ `JhRLMp|eq (|O-.[y HM%er 1o6:ʚwzABzW|ww UW _{>~_0Ǔft'ίs̚/_S/S`2SxDDH;qȟ.daG}uOٟwf&p0xB|s l* kCw!KKn2yLYX}ns_nCFد`a\&n,#E҄mOo./ø*aeDcv wwj/iiRX=^Oe'.F j<"a(Z.ep+k'mt>!w5`Oqeړ2g,3=!"A T%ߚ!l~YQvj\[U4:#YsX}Zwq/ "[ a _߯I͘dngF><7c㫙΢F5}k-ht=2>,>L1`3cw0$`/Jd^߻WT>nB1t<%j0Tqü@B0GL.^WzG<#x\{e[7P9/IC\E)5U/S`8]>z*1uD4;Sr1*#CF+f 3LiH7 fcKL0}p)8_Р $"fs,6:5 @+ 0HEcRAVCgV"ڣuuTlM:ui[ =*B/*d (ĽIx,,!Z`. 
H&NBLj-mxیj7PtpaM?|pJЛ㙞 80z[v~G3^>o/C_z \-'HQI .ˆke $BQ0@)c`o gcx)RMF ن.rPX䊮ESۗi#2 N[f)IS$5@-M҅Ө: u0 PӄLQCBQP)F 1V+ q,*11K\*%L2.QXsF.kWa'BVx3rjr/ đ@i q[-6 Lʶ"| G-RZ NTVLX > sEW@dpqI`a1w nv>-I=o $-ٔ%rfQSEzaPCIzX0gؿ^JEVY7 `McpEXwyFaLVURm[JuMxf[gwb"9GQr-Yޙ%NY_z㢽']rwllLYy|0w_owb.Ben"E}]@T~i97j4Lf^Q"c:}Y<=%&C F+P})J-16E *{7YKxQcJ9*:bmpyZVCE$3U2͑72ut~]jם(3g@78yK9XC I:^qth~:(EK-8wU)d-8Ν!u?Z/uIdIpq(Ӽ[ա QʻuvQIS6 ]L~ٗ[&DH\A^$9>w/(Aa*ո[Rbp˶Q, Ӌ.jĘ|jmG? %ZY>eeVRC!8 ԏ1R1'Z@xvλ j;+([-r xxQ щW!VAO9-Wplq4q4cSFh\XwH 8JMU& ױ+>}TnuS%pMj OmcLUpeR*f8,߮4cN [KB>|pưRP`Sᘶ% R([<gwE w?-^:W%E} Bi:Cu)'ϗ9 ۫'xEu ׄlWqp_[@&0(pɤm$c_W_Mɉ~=&3t7p֘©3)'t\̖A 2_7$q+HcPL]KY=.5$ל{Pa)@G {02Gihէ+=|%R`** O釀IcҐk}@9.۬z*&7LnuWUbQSYj79A4:J.Clj=)^wLR*nP,ƾ~/Wo& 7F sp;-,uԥ-Q#dHJvX;Ak`i%NALɨ6y31k`RwP %C.(SlY8hPBQ2w%V[U R@TYch Ã"fVK_`&NRVhڂcA:w!,K< e ƅr YA5,T KTʢ[dPS2zajRo—RH#T"xT!v3* x餇`"W^:#^K> ꍒR 2g'p 1J֔˒YeCZP=g_߸|fuZù IhC0D^ԏl5E"h?Q(Prsܜc]sh [Q)ESU6p?՘~6Te/л>6G3 1 wJhUZfG ND3;AfwЦc#M UV3"} ]J[GU" Ef X|bDY)|Mۮ{ PX}xh sM:'/.Twqbgx% gu0}`L ̸|Q|zzz-6!5l]!VT2VA>NyZ)%1 GLA ;W%C9eiV 1%Ȁ ZR_Uj8wI 1ZtҔ%fvHuK(c(N (U'P6-hr`wصhq)jl"@#xBh#Oy^ t*AԧRqNc#IA3Չ`e#+ticEGk7Y!K!?bfKIMI~^lTec?NaI&yNيw ]sIۖ;U,)踾x1Zk2!̠vY_)H1ּR!a]W +AII oàhj=>f5g,-;;N|PY#Z7 )' H npi$'] P#k zn7rh+hͽm4Ŷ Cr@ĈC] P"c!E܌F\k\G*>U7UkW:08EL`Ji;@7Tm?],oJ_:T\;2)zc9V#7"ȑ!h槂Y@V`dVuS>\6>&ޫL?^ UըTU]A@E(bwzg ]D_23!H89U!/R=]e#lϕ0ۋWOB}YnY\#Xѝs2Ԗ~ 6`0s^ټ+#}?}jng?dϤwxb%9k\t8D*A\fn?m"~ /IH;!p"Ig<}rʘBG%0e8r;& 9 }Yei*K69 l}WSD@x>Ɲ4gҧ'zL[䮞?([ _XЧƭkn}2/>\FI[Li91m샭z}btmڊ͹uݪ矏SC֐aǻGW`0v(SBI͹ٷpGg2Ԕytt 60 9PBĴTdZ=s|lmcnt0Пe=+T ْ;ΊWYl^.dzM' +8?(A GYO;mm%03 9ӆT! G4XPޒ=G7/;2Zc9cxarޅ.Dq 8=ݲB2O}S 3ruϜ^uQ1EoPARu3f4Njw慄,~KZv̽.b#ߠ szUgr [.`Q:L__~Df],O '!\OaBCLs`O*7Cgfu&I9_%ֱNd+mywUxTJSvqo$9ݠ@FF\4fOϪ0dpܰsZW-xeia8R(KDhNCTKKp"gbE5xc$wr'4#"}|.ӏ蓠oyq(wHEq~Cdg /I7LLpcnG&O$e_ݘcoӶiх䇖\ojCY&/yPn=`ݭ38T`x4ǎ2 wPGtd.&Rݬ2o͹"j@ea`J*>0ad _p^ 5,xXF .[ ڂ"=@Cr&W?dm-x1j6.cL5POʍ$d"~IlS4'1Wxrŝu0$(fO"2=A|z@keHt/S&V9i'O·6<%+)tRhQf.3}sW~ .9tצ-&F@]'(%]tdK(i/MY~!i2!'zY|{~g^8^ʂ{_I[t}YTtIyAs"*q2$ꜝѫp_OwC*is1wU™O4=RD%"<]1 MKRE[ ?|ELx9n@^8,_E9㡒'. J( N.GjĘ"hmW=p&B_A[pfy҃ bTgV4SCS_<4!X#˾ЌCGXN/!%a9VK$ւ&|ċs6t,gY%LܤdUG=!ZXEQII%g QweI|5fJG%@OXYy(3H*JŪ"Fţ2ȸ22"E9|)O0+p0 Όw3|WY\X3Fa?㇙ al񗦯ϳ@_W;Ɔ9m"}qi4 Gf|I;F "d/|&}BgߨrHn}w'8^&h+`v앆$p<ƅAW-@@^5o{qZj{r5:To׹W2IG<N~BeA5@w_ۛ68T0QRD8L:LiLDt Z+Y*p4Hݳh1#;sL'TzWP*Ϳ,vQع;c> S2XEODbïEfQ)It4i&ѤQhP_4rS袡 JbD$<(\L(I_/Pԙ?|~Z ~J<|NRAӁNl0RLSC_1Px~>ƃo-2!y0TUI5nѬ5R&6lSaV&;Ϝ"Idc՗;Ibt*~o\sY1yGaNUg?5/Lk5 :|R/4!uLW}Vte~ >?W 6 ]/SLI0՜Ru0[۟yEzg}x]o[^C5덵Zѝn /q)윑\L~fܫ۪Sos:Ү9׆ÿ?sV&%>*G1j-4{;2T^uMJ2/n= _`ڳwh5 5%Hqp[\j2\t}U1z4*༸ٽ\j=Ncl]KCIrA0TL1h|py4hi l' j5 ˨eŜNiaK %io"bKF#\ A3B wUvуB}{ =m`4 8J2jZjq'f*0t(ˆcY&}Cg0?oFh?˫S4hR츠d2_L!# z) pIh`Ol64In^~xo/ؠ^ Eگ_<V%9f2;ROOPYN-vй+[\4"X3=x(t-cgOk\~tf} b!$f T Y$5evzCL/R2m_?%yrn:j& EG{ȽtA9 e/IWS*+x2^D)`cY1Mx )J;oylMzڞ|ֽ.ցJ.л}մ,(}콓9i\a%2(,((⽓x' c-E3K J^>=dYZ3+aL`H(– =7>pf(RF#ջqBG ֳU;5HCYC1,׮G'\飄B0&q*1S'ŷCM귕@F%ʫu}@*%]578[Dӏ`m(*5Wv\+ںբ8#/"s 8vBu|³HQEi ](Jʨܙ!/a'_ }m˖Y8 mq{&O~P;|=a:WV|zTLl,0U~>]X_w5oLs\9~d:J+k '@^CJJٓzJR󾔂wr)  +*h=`W!17F2){l•O*+|OmjiJ}wߜݥ~ ?<7?-O |}p<RWx^`ѣD8mi9 A{JRS+B(Ion"cѯA WrGc6ćʐ',y.,lSEH&[GrY'YԖ BO+K7JS"cBhvj0B.dsN z^eQ#'pCzQ|IqE&Khm%%.ݒrg,K|GF Y9UEO[B=2|xgCl2L{~3;r{>f"L<̠ ,X\!6B`6Ϛ*8TpAH,E#TEANH)Ѐ3}Lg ս:Op(,L?Ns\7?'RF|xMy|p!#LSe3o8wlxsq1|:⻴X? t;v6x2o&|"pmJ0\R"PA6 :|swޣ8L*jZw:;k/Iu%+kxLZ~> t"sJ j%`ED]K#R34;=Q8!l>xi="T2"OSeRNR[A$*+u~۞j0҃LD̒/f1^"SxoP1@{&ѵzc1T#w՚ㅊ<52c0 a0JeӧЀaPU -: D97}Sfl싫Og鸸:}(Vb%:$T. 
3*,HY㞙 ?.&xIuFfYN.{Pr1TJ13HYF-bd؅epz1'*hYa"uQ'EV0 AbrUISa!3.l:YPy@ݹZĂXPVϿO8 ]uv´B;58CH[Ϟce0Lѩ!jLQ# Nv jQ7aYU/rϮIڧc+fJȦD;x6Y$Vx򠵿īI{zk:|L ؅_%'D43金{aQrFiU'ho._CStp3EH%%A*f'py5"ڙ]i2MiLӨiNMդhk2n{3>Q6m#鿢/[[{U;W^\qᒚAeF34vn߯Aj4 % )#VO 4ӚNArg8-{;Bj&ں2Yn> XmydcQ4[K(Fp7o3yJ@Gyz}(kJH9W Լ\$^M"3~\m\52$  OSVGs.a Iۤc*v<>:Sy\‘>j@%`90:AJ9?t.lXE{ckL"J3 U1̝ƑQP=Jx9BE}qi!_x-` Y4Cc+<^E! 4Ys W!!LS9 /l#4aS^8Au^Fb&T _w5J /d"PG^{RJX d2c)KusdϵJ.w u*K-SHT)uۭf;Dډ{7\M Fj:&Pޯ6ʵ>sb+0wQ̧W7mWe35“b<]΁:In*^2גw4߁!A8 >jBM{9G#Uu@(Gp1{),?t?OJ'ǝyxLv:|rF {Έ1@31FC@|t@s){ztC+v; 8! *Ywٜh|Bm |3_f]zݚcǥRW;js r|7mGǏYcR6Q!֓] }cqNTUHw+2O令֪r}Z,?~DBz?$#)],x}Ӄ[bkTIh]cF˧7Hik5['=43`ή-t8 TSʴ9e;{;u6M!%:lo|,igainfRxI4zgSSw\nyrWwsJ GkF7XL?<ЮQ1a(,ܪ{Qsj;E+BPWD9ۣg%cW)sͶɎR v\DXEZ;Cd3cocmMC81bjqw|- HqT3#=Tf2`SԐY"&xroW7Ȱ3NV8%HPȵ>tzX6H58uh4,#bl_+<źoaOtYòi_e y$ݯۊ3e)/z@5XQzZiRYL:P)|?ޅO;ʈ{|'2؁'rOqg'0(ӧ2y0OaXo7>j#@nF&A%ҕ=nEMڠVZa9mh뢵(Lk @@V#caZQӤǮ3Ezφhaj'Vps5$i\Ш޳ڻ!ʧW~1nhQcsGZ]ytpEaIIҔKǙ*y*ܑah2ءwy78Gߢ 0dx7K`8!+ $@ӯSyy_}~@ޡv8EG5#u]W CS{xى&^X%:FESTo~:&6^Qt'^1at~^1sDNYpH-ɨD{VX_'a=Z/fi? NCFB7K׋[nce_|٧RvSn|JM5lՂOdrL$vT1REHj)VCKXuT 8r2P8'N'Pzn>/ dtxń!c7m,9Y~r~؛f/o=r"RJlj$!{|(}aIb.H0ifq %0;gDTj4~q$Y%4)Ԙ'>2As%cskN!F9#Ы sQP:炦8&š(~@Rz5)qЛ`{\ji{5tԝ7R%4DN9w2%2 FӚ,sJa>xxᒽ҅uQzz=.Pˇxj V28x1;߯d!>SE7C\DDH\ʄM矾cX>ϟܽH|&jrkoWQ}{eVc=}<r5_almoOc H-v{VKE =j0M/AZyqi %' Q=[8X j{S(]|s댏X|>~ "4zs곙V~AV΍uxtOABY% pLڄx ( 4 `r+(ϠT iKh*O17.)v be˥$9%9i2[Nʾ/H E*1%AtpD3Գ4z /4 ZqF9n:7RPτkstxFQװ3,+s[>,Y%k;kkRv3 sGbװIMwGg5DIJTnY,&c Du50m98'ig2teD9F]e0 c67 4dzmqy*v3>Sg6> [3(3OFUٜbk!2i@,E"X:CL9c5ȠdȲkEN4[kԌKR!#@&= i ) OY|)-+,>DTe9Vcn w4n\gb0PyCSDtr܃Bn_/$Pwj0r>5:jPd.\3~X|nu԰94_& V+C wBp=Uְ+U֠CqbtU;\REHmgwjuCzZ[=1ʘ6WZum=6ֶ I؈7 SʛDy>nQBnn[˭gOc98.%bd,?RS~x{% ]FN5s88z(Hh?v_NG.^wgJ0DnٺՒzƛػkߧKiye}: ೟߃MNIl!mU+p%l; \JW m-ZӽhkG[^u6ڢ5mR[n*JR _Z7WO?2m!"o¸T,ZlqnHPev#C̛m>g_U9)<.HsFw S&U&󁍊 1B!.c֧"eʥP&M\vptcש8YC:3vLѬX0cFԔ=5O۰|GTC5LeZ"x&ixj23J KRSbS"3b)m^#֬Bc&ħ&7@$ߍԱ ]HgH(5P&bUI5; 1*rÞ,T cO?{㶑OSK H0lO)z8O7wwUnHzMFGgW h'ry K}w ܽld^nt?WokDPr5/q4u:p9$l~ !x%0|5lf7]»EF"c)Ź9Lҏ&Pu Q4y08؃f0p}UʭHHRkxNGyf Xl] n `Ɓ_*P8&Rd4:gW`6 ^}\LYrKK흠wܗцE()cE"#>[``4["-3+=hb.dVÒHlYuPT^0mN{}(5^ # HX~BRjH S¾$V&`Dr1ӷ` MZ, &tNanwکp@A#U a* +!R;pZaAyÐb.ZZkP4b ap \%  sВURZSsLA [{mֱ\6~FSjXԞcJ U@kxKCMk`SՁTa_9mysZΡ)w7˃;{dlu4=?-]P5'ְV# j@(#~ggwsq $Vǀ!e L2E7B):fD)tھX0hQCf_ xR)A{֕oEDbU˳ =8j0ZIrX v4E-Z U}ipR¹ы7L7b)f?+`x)xMɗ*??AO :(2H2híRm*/m <<ޚ>V-ҭe,-*ΥZ-^91ZyjZ]!7NrhKen)1u񝝨~0|&d| _~<Ԛjw/%H/` $Բ–R)JSTԆbS)9{dc' "!&Ѐ?|&ДϳĢ?l67qwm$qi3ΟQ ǨFh]-o}Ϟ! >NyfǼ¼ #X[l_~]f5ʚ2saW.#ݝ$i> ?']-K|1JD_ 苮%rX:mO[e1v=n|@)JJo;_1 {D"p2V{ÄKh.ĉaCu v%+}ہk8B%ڔ4tBHƄcNU0E (K #Q(nd.p3V[*F9PS 9{dgha5բZ%[lө~I@TX[yG /&fk\ KuQ~+!0&~5|j=nGvo)WuZ#KL%*&XJ0%u% rO{[KXZؐa$7!GpP몮~ 4*1ﲘmǻ*x {zxU.H`dl*b3qD#U ;n*X&2t CD{߁y=vYRn;쬑N*I!f.ɒR%r+ obnwwa/ 2aH,#`D h_ IIQJ@cƄ {=az7̐a^zsb#*8f!x\ @W:)S0zFI[EVAvxq3(XAXiʽ",Wؽ&0F6% J&B[DUÓQ,܏g7UAـh$%΂.B l@H0!t8Ṝ!Âa8 W/A'I 觏WqO?Y\dWc"qD24(pմD25fU좾=Ls9lvi\m+:eo Z(OAXA@ Tl-Zt$򚳞ѱ_3*0?XTDT[1*%j%k΢\Su(܆b9STg]1fYAh ;g0–A[ KiAe"!D(jQjVͩ[lX{E^gxJEʙGBR{9" n`Ȁ$ `0X!ΜbeϥR5zJ-us42NAhDTRNĢT8pg^{*VFp0{T QqgJ/XKVid0`S3UzJ;$ ™Uy֒ 9quKTIԞ> j#@#!8٧2OcZSUbf.=۶h蛅Dh̆^m^;6 sLMuT˘NAKylp{`Ѽ{c@#z u)d9Dh|4Nqb)&Iwޝ3pY{ErWCqI+{^^| id.q=γvv[`6\kx:+HotԀ']XV9 e%'ؼWʗ+_i~Ȃ&Υ.IXyƠ!$a< CCDz6S;yH6]rИRq¼!da]wCFZ!޵ߟz-'9?hׂq&Ao U?ԳؙEo/QwD"u쒽 |},{zpϑ]-5rH6m];v u" 5=,p2' 9B!TiGizHzF8KL5M1q[a Th>Wkw=wfDif|) AJPPe[nv/STK/(XX"Htltj k&HਂZʐF<ͻjR3rۧ$,y1ylV4Bx#p\{A{<B.h<)Yk_]KN& Qc׬fW;i:Z)@_#;Vlc"̞GeXi Z%WԜՒf =,iL:sN, e8M0*H,Fa|;hLJ-Uvжԓ{ԦxRךs KlИiX.mPm+aLb%(B )R ^ H(ZL*iNSKl8.1ݳq߿|-e(IÝpa0o?3'9eW& ? 
<7z[[h:x71."dW[y@Sq1Gw"pZ`EMt5v#oY&z!|b8;a1$ { `'#!4YZ۬PrR->6o.;T J{lڒy!$v0oY`'1{ ]UFɗ*(g;̄Fr~qrV|hK+.r]'U1D|f]uw挝?vpX՚= m`)Z`wB@xn]VQ~I:S 1v_~;OYt1xꛭ=՜^,;)߬]]S [_!_:D5Džas `0RIƕZ5$T @;'Rbn[EwCƙ`ۿӋ)ACNegO;ĕZY()p{X粒??P[nQBƭ[WέG[Sg5Qwo^~wތUSJo Z#!F4M*{gn2KPRi}|v~r lQ՞Ĺe!6c% bq~жP/)/]FϘԈzJP΃RNj͝yɩto*#{qVLXǔsyA>T:D9UY|/:b¹B_^:UG4c.Ap@o>Cʝђ*y?ueA-zop*Ύ>/dW_4HB;ŻڇG{y=k~a`}{s~q[j PctbS/Ĥ:{Rw>HJjڶ.j?cPw_t@̸VC|qNռe/.u T# l؇  $V ݩg!gTD'u<±Z]N5( 9"mui[ٽ>k4 W'q]C]o#gZEhk[?eʄVm%34V0ӰwxNO,N!E Rs)oQ1])7;xrsdK賙h#! FaY8o'e*ndfxOyNK+)M+Q%1y^:-ⓙ{WƑ /pud]Ja`tAbdOoj["[]Y_feef Y2O|vS[_~ǐylDG#GG8UI0K"OI-{֨,^4)QWkxEFTy~T{?LΜsFs-xƬBs<%<>4_*Ϗ6Lo{Pnb(M2]xF0h4/R>g'4/Tj ްGBj쾖 `ڊ:N2Usљg lO LY #/{h``2mh6>ݍRm{~ D/F20CH$(m& O1wRkN0ipû𻿻E48o"8r S! y*eÐq1n8"W}*'$e<߯Tt u?lP nNx:3K_+؎BmϥŎF7'0]ۻpc{NzY*cz]Lz3ÆЎ?x) qke7Smo ^u51ruOz>?fW3陉R8wvc+bMEL1:ὬvSuAU FtQFMITYjMknuHeJx3~]bK ^™\Iwt|% >e a4vqK-⩫vp= ƙ:%]HI\BBrNgOe-9f"YA[}?pݳ_\sGx]_X,7KIjH8q&SmRF1BdX*F"T!n{/.{֊Nq ̜3JGdH -jq.3`M4<&kϢ2Ӿs"TĬS=ow.gUC]6Uyנ)%i^a]rN)#BnEgÂgzP=[J@ѧGF˺+gf$Wv*S5@54SxzvhKhsF,s.8NU!8(e3J~1tՀس \@g'pmRr 9Bg"uZPb r:8j|8羞fQt4"y/.\ʋ\[:ƭ/ֹ\bc(=o=FǼFFeu 7 Bn>:7IhG|gi.˻sDV]!?:;QO$Ɲҟ&Zsu|wVt)aZ;?;|=w~QpކɽѧAFa28Wf^T"/Am#( ]J$$r1Uw/ͨ}ҌyCQ/S >O Sıu eXPfL1183* )Ʉrޘ:b K%Ȓ<_`O24)uE1A XJJzTaP-' $jK! F)@"JQxJUJ23EltMJJAAen4w|MU}Uℼt!W\{Ըt;O+ N0r˸HI/G<8`tfw.>f\v`;-NN<*;֫4m$]&sԝjet9LSS3<֝*)9XO30$Ɯԉ)`T;ްpNIvID! w9_K]it~DJҮ4:M?YR I*\bIYť{g<3&Mg3M\KRpAQ}Cw0.1֥σIb{)zLs2?WBI.GJ`:+l:+ݍRm{~ i_`h(f (K (jrI0tUR7w{#z#t(PR2JלkF,mʘB4چVO8bӭPTi#=x݊5vOg.MY + T!g5SK-2XXH%aTk)_/,*1]Uzĕd7^ՓJr Y__IEBmW8HxhԔY8NQ2YMtA2 /CGi3Ai Nk;zVP4ߧ'$p JqZ$a^IưLKǎ[FӤ`'H@sY2[g-2h( N΢cSo6_lab^֬[+15%ƔpB'j+#+tF߇ת 꾰pN4&wȇ*H⬵ k gAS3g,ጆ;o4'%8KoD'@ҦH XaVcQB,ٲL"p6vsQt f\i١Ɏ/L${WVHuAa>N~Z_I{_*JP i8 #[ζ[$G$?}'FMLF$.8hyY56#zI5zI L:)2jE5F$%^u*\YhI&q*f[sљ8ٽ›3 )kST%QQwT: pY ѥp_+AHV3h_daX'u$$Qڄ10Nݓ& dXFe2E)h@@fHe$Ry)8& ّOVi8z_N0a vRԤڐ3Z\LelSnjۦĆ9S隔xIg\ 9셮sIjϮN-SšssFqD"A;i5(Dg9xRgHBYL"Q60(A2h|>w3p0jQɊϺ8! $Lqd^xwB5Zmxs>W B馽}:W/">?t<=>ɡv/DbH%f\AQDI5PzT˥)~Ol t8lv$yWa'aX®G~g-Y4RTyNAsUv9%xH0DН*;%8ɶ^xl6a172]?Qxii rmވtSR׬%f2pf[:2AN#T\`k;NbPe1.]DcԊǓCC¼x{V32%{+^MGmzH~~ϪmXGqfH=e﯏Dlq|=Pŏ٫~#e|3fHijKe 3͜)7;ؒϝwet56 7qDq0*yG9szK<[e98@fO5 *?zNk\$o|Biu_Y~5sb]:|/ȋypgK٪#:]ȲOd*roF{֏$d.\'70?c$\PGMFEOQ0փ0mnYs觖7;WslabwᅪrkqQ,vv3-kj(:ld඾S6 V19hWlVgHu!m`xb uwdIyQ&4R\܇S:v2i\Gq\>+Jbf5juZ5\rrAH^KD¼3sJ CVV'% L&21X.fѝz%R.F\<+(qlݭ:. 
uYulC:뉚x1UӜO&$6lOS-"(JN UJ o|AQa]3FLq#2@!#F^N4ue 43P)N0 ƿ*`/+07FǗmK5-E"Kk4"n?]io "EgV032*@ 7I 2ՙT(INjiN5b_O ȟf?X[?v&{zt?y74TwTw4I_9Q:qQHW_>"PF(/1uogxBxw>zo`/No.wmI au M1qlX F%b 0ʒ  %QzYGfU櫾90B{;a (UBaDPdf80ǽҀ+y-1N3r`T9$8J3 #ՇJH}{zR=TFqP ;Cާnv(UCJbG0]ͦkR,L`ab 04rZ\J'4 -Ai&HvԧR HAч\W׃OW/FS?1)xo2hI;\s~j;yI4q~μۑPm…|l4Շڕ,>AN~'7VB,'~SXVD~O {1cr;|̓_{?΂@Nwۡ_L>L~co5~wwd'/D:G/_Y!N~.~|q~L4TKq)<\ 䥋.e.06BK/6^h༅GM<,ģRK89\Zw{y43a_<{\yt-^|ӷ?yg'hj;p_OZ8~e͓5m8=qݟm2՟ݞ{ٟF=KAڿ9ߓOLesT/UvKp(.qjGϡ+}-7?;=^ ϯMv_Xh~2h~Cu}OBLT/]F([g%¥pj:;a`fcu8LorGr=Sc VgG1Ɂa9=\L{%酯bavM}?A퍟7 A ҐAq5磟/#Ou!$Y9$?k}S元QwP^|_}Jj6rN_ןQi^[@[F_FףL)7@C㦟3w/џNm:a6o4~7$m_C7sZ u} ׅhR< t3o3s߼'#?a󬏆yr2Ϋo|w߿ėXUnPdzA$_d>[{0$X1?.AXT- %$aV!0_-6vҢQv|kX z;%?P(=;zSppp~o{,X9@(eLHP- #/p.hHAxh3'I`gUʀD4Ny@ZXZhj;C3aR z(L[(ꋎzB!cz粳2v}5muwھJv&a'v%D*l=ESkM9XꙔA -UZ,:Cud]j5C?s xӭAO6&3vKl^Qx0?o`dvZ3miJX)zRnmRH5#mQf/k-m; d'ayZReh1٬˛5tPAGw_;LwE3u#=69% (gwe:CN)pwS%myjw{EVkbzN[AYFZ2#.ki՗nytmMhx_-LQt;رTX}T[GaZbNƄ(ݞdFC1G]s (.ձZy۫y nBI8*܉MK=.Y_Fk?i F1QmVn;id ~`(qTR.˦5+WWP~A]V!7bw.l}(#AC =l@RWl^z55V eJQޡ29d2Y-wyȈ>ۙ p]^m aݥRY$9h l^.#Pc?g/Ϲ{;: &_Ct9~:*\Y$v/r9rHKZ} #Ds#0KՊ^_9bX,iX> 1 /uq0꘶J/⶧GZč'T2MmܕQb @?=6YEtHLh.y{9!;vήn]h?'*Y L#U=E-UJܴ?I~*5+'LdH"/62D$b ئ0.PRʜF[D-vGU~n+0O9M7ң+3H.5BȞ!bޤ'1?ILOky>s[c&WJOw#p"!NgD1C)SڷtVcEqFCP;ƴvJp%4wrVK+Aыk1adA^YԿqqWM+f-ZT}PJ2Q׈>^dd0&j3]ĭTEd}η"Gs=w- /0_}@"e,Οn.f8τSoWrj|D6@^/'b{hYdze 93y{Fs^_p}plbƐ&9wu0e(X$H+Y>ǻj ;wUE3X +*&1wRZDI QfJR swv؅QhLN)9kڄ<]R![}-M 5 CQ۔2xo?Dઢ2ڔ2 ]2A E]l'EhݕVQUmm0ܣ.(gktq)H!RQ RF#纒[q6 \$6QM#灐ӏRؗMy1A7]j%SD9,jfRWU%Ptv"h3=_WERCL5&8gp\1AxDi]"/\L/*SPvZ(\$e:MwĤLU_TiwoL}6(f+s)>V遤b׮O!ۙg2-/fɠD)\ødO2aԑDFb h^ \Q > ٞNV{dB%R U fMG),'?eEhڞ"[i/; OX6ǔK-9ihȭRYCUZTɎԲ kGOc1I1j.ݥ58ugr"T k"t'֮uv)8z`zy"3XdIY>l)<\5b(p-69t5rX WהNPH Bz=anyPL)_ W'c"T ' L{:gyA֧4XMHbxxp/^jEduy_\0/4?P {oA%Zq,SR0dYc mH*!$a.5HJڥ$3(1ѽ]NL&LꓓAU0'$GLv+ĄN%A-d.'v5"ۅkn9nk%0 FL|"?HKh&a8R$$0$!$Ɨ{ (M%̹CJ(bȸzGpaSN`$ͩZe(E%zW`?{^3L{xo7LwhjulwjiCDb6 \)|7JҔ `~ l\BJՄ42YϫXa=e6h!?@ s hzb;^ִd+tA (ZLJؖ闡Da&,@;K :% I~!D = |$FZ}XˑjMdH1 ZlP\/P)0IyS:R>RA+pw LA-ʘn9rSڰpIՍ r+`'(Et5Oqk,Neⱇv,8 "f2zZòTh2>G /쁻&0=;N}s8a,z)*^Pk.|>+|'Rx^Ie'ԢJnǍIa,r{Rb{8.-dV8vW43-%V;1vF|XU]UDp/ w!@ LHc)JD&!E 5 X]ktRL(@RU}c%)+.hs1E (H R)L@0HeJE")R@X[ZLJuqUKт$1F<~͜BہUy@@V BkQ9+h$ 8[ +%Eԋ(Ujma'IRaD-|"|D$B;)|gVIA$ZQfC s0J{&/ uX93| Ȼk6I$>K޾y ȅM3_}ZOS0d"bx=_џ7@v5?>\_Oo?|GB9G"%D?{tc=B",$eWk3_e-لR" ũ(Mݭa-&V_6aDKZUn!5*t3l-YQ"sǬT'87 O΋$5(Az,a#o96&:Cʵ"%f_P/@fQ;VaV;O5 sd# v 4E,cY;AW)"9l.bL GLsUzj2N8iZ̯r[<%=\4 Cƌy411̥yra+3X= x봅υ7XQ  onJ.:`J>j4-ZhP6,zSNr;*sA}U5' :8]l5h$m'|gj|O[4SsZ͹ql^UX=x3:QyQwKwqfL*8硽p޼-K {Nywg5whCƥM(Z)lC屾udO(M^:~eYw5GbpptzZ^‹άMv&:R$ b$(bN)OdǀiDRvn HK4(jLʦSGʘJ j)ɵ{{Xߣmҟn tvE_sAJ*9*E~MҊc_ݛ@ P"٭LsogL<@A}|ki4c7_,Cr?y6 cF{' { ;{=;' MAĄDS?'%׼ 3 rj:iB6&_ d-=?$^3ޟ7$)Wemne57(Tj1Sx҉ژ¼d5YL!MD8]e?V{ߏfL@6ֽoaQy5ϑ~#U r"eN E$cLQIbI!D1$H{\X^j/m\Y-yW]dpS09XIsʈЀ R8͝$+0l&8c2E_IR).+]8z^%)LzJ~v3$+8],ٕLNT7\|J1e#*u}q4,$S* r+qĕ JKv+qc X"a%.UqSPZJ#OT(cF`p3C+>L(PƨБL][ YCACP= f3@yE* `zHpc wz ofӲ6^x0oVÔ4BXB>zjǻ壻/~:;~_|C4z9ChO@fĢ5Yrfg>a>=Wν'β=^qYj;h+ k5EވTVN`hN]8sAz,޸I.YM]bǽ~nHB1=v1L!eiUWL,8c)8|tt}Nh s)&)B:[وAg7W߯/t0}p1/9<`ETl+%RK cSvcx ۱kc@ql+k$PSPDS A S)0߱BGl;+ cqq:` 6s@0y\,"aShϣ T K8vTGÜE)6ae)m"eVFOu8mfupňs ߊ@IDP >eƀ@4i=<FjZyLu; ϫz2r6#׌>@1ӣ‡?E";+DRy 4^}mD+I % !H [BHs~I\3j5zMQ |+kf0Jbq [dDVAvY؃j-*D4N{wP6xpIvSgVIDDjmfZTFiτְք2JZ19 *zgaAc2 %r9P`aЈ 1eBEN$16 Lj&5,Z mv4f-Pca n>oMz?ykt+>eW$%6|/z> 6<,fbMю?_?>ci{z0s0j0 t)`w; S~,+7RYhU7f65l.3i 7f%$M"?.2O?,OALh.Y kKMv]gP ]-Еuyd5^Q&uPZ(^>(R3Me"UZ3ysRqHtԐTZ )ƝN&JY'TChz==%JT\}D9a除䇪T96,-6[m(^Vv!XM:0A}3Eh+EN @(v.թ$uU\K/B r,U. 
֗~k:Z/f`)EG\+&{93_ LXS?;i"˧i//rTnNj5EIRTy/d -ֺeh_:y@(#BKuPBp[)0ksWAtH3炠< RDX^f鏶%4]Tk* `Խ3?GhBeRUj{JȿBmzhd~=u6!$zίzίkwc: ԦG Jc8l0Ce0gϻ%FogLyz5zL1F=zGa48X>sL}O^*YHhKN!& `pe$8D0$X{rHRQ%I>e+1+]e;LJRxe5<}Gb6)c6cU]PQ_n4fDgc^wRw? v= ƫ=G]g-Jros"#q\\Nౠ$xлxYsXkw)r4%dюg`EcvaYU~\IwnJ_;QkbLw=n -(lY#q1/I&u@BA.#2/iu؟#[HAp/)h.`/htV#, >r? ۿmn4"L-pvؕF F(+Y-Y7WK[vV m^5 &m$L!79s^p;7\`(IJ$Ci2Fz0̤ eb&C< FJёl7uo`z+6- TAxg/1@A>})f'QS̓AIg /gHS:DD!:"(N~ fNz'_X&+25'F슑LF}{=qMF?^?|ޮȫ~E@2:EII= y\wї\elqL{,[Gp}<~;A!*Q1Tdڦw?Yu2eNxG̵mK`}ީ!RPiQ5hXwm_Cs,g!u;u&IOt2 ڼ%U$n&]D[MQ;i"A -;XhhY2sj>LAfYz#gG&o]݇8t/ V;MFFǷO(PjvK?;Dj|Q84ai1L""YeYd`Rb|>8#dIvr!P ʘ<@@VT/b}=XA9r,!S2*EhPxd2ުe exqwV:-"(;O%9;%}d ̯ƣ>~4 :Fh ô$<6aq>ǻLXVMd `!DtשV@T'`fUxײF.N8it2<댹wO$π? D] l-Ɵm` #\;ϛ)) ޑ,_BM 9%ϛ)78#w4߃^AP/&bvr]93_ݬ|94b8O恫F󴳻ОY6-RK)j൦yP{Mf+BA(H`$1Zҵd\dRz1鍧+Y^ݏ_}0ӣae>߅eyy7&GzU[twvBmJ(LpeA!1jI:HRjψXk":[m1(A9BwrUr5ДKڤRrbS4@\@&o tĹ{A`r ƙ4BP+Q)5!V\0ER'њ XKZR*_*HC1sʹFQaX0e;7(26БbXGtcIi՜AA2j\H1U ٱ*% ԅ!A: tr[j.F|b\TEm\E(v'1eX/v -EeRBvc*NbXػs1\F ]aٽGvN"`%1Mgz\k]2=SĶW^b*KT.Ty[LիkP[ZVnي].e4bRul.CvU!zke /cS/Jp IRfX8oBZun& 8*DTԮQ\OQ)+}:TR?K˼5ږ~JSoݠhLpr8[kMFA%n!5cXS%#pNR>;GD-nՐCM5C RM4&v< \.I=01fI1S dq͘/GA]d N%);Si:O`֙d4}7<0Yq;:xnd]V1 -2<X؁*SX}]OeI臻bzns*|/u4/JM: b7ki3QX|3ZVnM*SRv'@nnN/TJ(ɔX*! } 0Xھ3MLVۗ:D))Bts[*L3_/<8u}jʑbwY,D ncpSb'"|́.ؿf3BѨYMA]fŎ6MEJ%y t B)tR]]. Mr/p%Xv.g^~P{Ԝv"LR2Uwg Ya K}9FtѨ.*CDMm ^(}VSo8vDq&Ի;,-zQQg~@ P!hd8;B3NCY/n:!%B2Jjgj, ,it^s=>"}ޜ9ͳr1ƣMd=dtc4 Yf6![H(gcz0T xgZFДצ8ӄo>bG3. y F5Ҕ M0$ 0&;}(y@$(y]D0$\qFGuغ%rVZ"h`Uњh/Z΅!ą6D6R ( `!ji(Z֗CZ3mv|mS ϶aL:HqVLjQ`"^F񄵶X{C1JC嗄*^Ir@ iWĂ>N]@T!S4$ 1Z Z T552*Iab8># `@!p. %"|ulkLA|V dd(n:BksFZZdcJ"O;P01UP- kZ^*_)4>tA@E;&n2>068_&gwOҥW7:LK,)}ulUtG. 7nz7@[hͻU//89)u T p{O0 w‚{Ao^N0t/M18;GOO,bb y/H ~^% glRQb8\lk $ ko#;8݅>z~O&/i9p$ o}Ad^'mk%s˓ܶ[OƬ^ґH'Նz(;~h[ϻn]Ĥ}(3  *;@{b9{u(\0A9Y/RAi"8;}MvAYJ@xao>L[8ePǽ x{Xo*}Wz[v=K^'R\D?Eyq&yZfO}N7FĀ(>nQag (uDxip`\c̄!$vX}t.X7j(9kT {OZ7[V5Gzۇ=q"<@ E1B,L;¡UP+ Ʈ CK _h "j*$+0  ̀lࠜQι+5T 39HaP< eiKŠ0q(CH !N),,FU0cbդ&7O ѰFE4G>j?C8fC# f  T)7 PkzP 5VGB]. CHѡG:w /8ƚȮORh{K" &=~S {hw 5jÅ+jNqڱIN(da=s3֕:&roժ]*&2O.Gho8ڸ/֍jbknpFqA!٬NWhi; )@- ZV4vHCª 1bw=QI^NuQ5a߂ LC(VLf*ڥ m@e!PX.0wj߅*{U[Pbdߗ +8c ! cJ1} s1rf8i`A'&BpYr%Q[Gn.`8+-=*C.xo܂{\ũ%|(bY 60Hf5G0Ie#DV8:aF ekV 6I+ÚPBD!%J&qq{|p-Jyw&FppBZ6vSGrz%i7jI;|Vb*:(E9fQ]g!3Oy?[OI&`_%{JLKOV>ۇnIb4e'o>NB$ޅ)q K J ̗4:gR57x* !J*pAhYSCbM|+ m!X4ĻRKPt62PNp_ g:5^;45D2{s.a2ePGx{'j0-Xh4~&mB D ޷QG| bGAnn A3`~Z<PspSF͟H335)yw9i4Zq뇏Wgk(?^IݴE]s忖;'WOQ Fq5Gwk87?&Mlgh#$}&|F]ݎT KReO0W؟)4FKMpx&8R%zZpkK} }Q-Onxjt~jv^.|=s)1Y 5fm*ւ=Rm&'` @wv5hf4_Mמs9߬y/u!;vτx}>"*Yrak3|?[3{K{Td_N:wG>9"򴡙#"ᵎm jqHs~td;OhX huT\E揟}NN;h7Y覜0 7io.~xfRPY)eSj4/iW4cPn J*+̮ }?r 4~^qJaeM'tQV$:/<\}Ug#bU.ZuYNH2Z7aņ9k*z~T>iD饓A`0pD4J?NEG,UKh7)n^J.mPu VhoѤ%1X*1#)b3ܤ3]ͧV1BkQ !\zS!Ϯ3P!X*RIs ?u0JlTQDkJm>I:gF~7~IX!b{TߝvC맡M: a??"L"U"jg$67M u!EYBY($X[l6Nzn$IC<1xf$8bWĮ7W⽹Y_ZSPCmwBpwۧek܂h@) P)bҥW81VC^WYL cZ9YlYuļ@-mť|L<Ѩ ݶ@SMˢSPdX {wWsϕM$nE B=J^ ӷx;iY}[ah>#4}rl=I;sZQrNzOFuktΪQͦ*+mKZә)Ld+6TqtQЕ/c>bʟQ+[JDDKf%Vku&̲Imi{ =}cTɣ&\MɎM1+JEHNlS1K'^EP)$Zuϖ\֙DKB엡Sb0=ps{74g U٠x, go!$UsϭcEYc!Dug +=C<wy[|߰7r_f^gg.u: ]~ܚWn>*]!{ 1pkF5  )X.rѐG 0׾|ІY?N ^H!ݰ/Y8{ 6U;i4eDrҟ2n'63!$Ac_yD䉺(%~,Z3Z0$Ǐo]hKIF4Q]_H3-:TSٞ\gqAu2 NQSr"US]h_ͷKSv&ȵܗ;PW?'i:LȪgyGߤR eɭL*sSv'zkdoҩOоWgI+Bi!|J&LCсݳ9PndES0];D>{ mxԍ}e A- NPZ?pvS+ 9g 8Sbc~B箸/[fG{#r=zrgQgt SߟYš5s;Ud-z3F喴`vUh,w(Yͳ`]Wgypv0]؃<8{A Ӷ i=wPwj&Yl(?:jXgڌ]_=\ܐRWmc&USs~SQb*7o5"TiQ 2;vfm6":>MrE!"?)ߏ]Y+0ue,jWJ )bAsj]bjnC˽dT)ňͳ&|ӫ)Im)_њVq|ui 1SsN4-ޅU%/BzW$|Wٶh\Hcz71sE^W:k21w3P_kŪ5sP*Z-/%z?~jfTšƦ_~\j&yd*?<*溜?yWMogl-)4ٟ/DVzOÝ8$H:ٜJ9+y,0(a0r`U5*opL-&ḯ $^cUC\qeQzQ}Yge׋ջ#frOæ  2Tyr fB;wBVhܒ6AXI_2 %! 
epɎ;r1ywD^(h_ ½(%.18 ep!HTTq#/{ڡu/7__Rax%Zl~!%ȡ8yp~ntwp^.ns?ֳ +Γïq;$ ځ&Ҧڥaա[D]j&|.GDd3WT{vuI7HzǴleqi*LKEn7'\hHyOP34`= TCMao+ג`.7ܷL|.܀[ Gʘb*(ii[IL{!8iHPS鸙KƶޞGO q;4 -xnwŸbGoLT@tIgp%c%+xB2(0m0x)Uq>CM15E9CIİh r]$g֭jS2:Jo_R< n~Qlu_Zc٫ެRec(Oz^aQ(6[,O]I҉lʋzҧ,XdIosQ6?*}JzcU,rn$ V2EaȎuӒºŠu;]xx`֭DZ.$ 62}kݵOiZbPKtZȺlcy zޭ[ۉֶnH.dT 7xzǺ-кŠu; 0n'Z׺u!!_-SF5'Є`y ӹ 2R%J ^@ x٩0%q*Ћo0U5* ުjZ('Ftt*v&NJWRo]Ac8RsZ ,O-(`)lf _Vy)h>W?~O1첋GrNB.+'gEI"sg3fي/.b򥃹2 ғ3iYފ')xҜ]H#Ƥ'7,\w, :O,OJ©~/sKQ ƱRJX8L0szZ4.xӭ ) zHХ俻[ose~Nɥ >X4DO%%KCҍ[۔њѩJ Fmޥ ߦҩWIPyתU~ᚊz*PCU/oU?$Of(F ]U, Fr1ȵuVzeX]-JX)ܺHdneN%R:62p  )͐KelKJeo/0 !NTԞGod0NQk-&Di8%(4WA@v ZX5N6fEܶwOpYZm/Ov=կi5I+tI )B[k{g830[lPW ̈́$Jz0V}8\ʙgyBxP`IȨ4RJ @BBgD3{L22# T AfLP8J j5"T;3cftDK@Zk }QKxb\!.u܊&:&P-]DqG&L;ؖ ڏ±u%-֝BN%o\ O[fyPLD2Reڂ8SGVfM.!"DtE7=MrɸǓb{|9Hj"Jy ᤍreJ7/jD5GKuUPRkDN$aՅ*ԤWSO5T nj5JXa% r. VE(H"H*Y V(r(ٻj72J*gIE ¥;THpEH &7*IcRoLIpXU Q%ު6Si-I۳V]#'oHA/z7єCi`ʛFffZ*h"oxJ&QOpʗ"s4j[xMySBO=BǰT2Z ACGL64dCՂDiJz[г i08m&h|oKQ~UՎ4È"u:'MUt?O|E~MK{#tt0Y;Ԕ!%B:7G&l&7o-n19/s5lU:AF(qiٷa9,eaHF;G_ |u,Ecכ_aH$'dsE鬵OP(Ks=ǚMQ0LR( 7#|>Ys4/RR"eeD ֨oW?x7qm $lMiK~-_}ɛY& uiѹZyc>uV꺆:,;{I+lAm½ZSAQ^,-)KF4ސޮwG^{/䆔oSZf:OowoozQo[1kkF+=t6ȘN]l@ϓ۾=>نj7G0{Y0sLZMSI"1qhwˬG[ C>g,4ϻ3'`f%A~\ׄptjirjyz3=Cq;ý wvy?&`ØCu-"Zo/9iV9VJ&d2jz{ !gֆ<īr3c ddr9kVm|-5ǧ['y;t/<- >m!爚e91.8bs- $FA)Apy;<\K4jJģ9i8O慃DCXМZF`Xz-^ ʽqyGm$O~ڗ>¦. |bdv>ogfrO~h1 ub7tn֎K+VZwϻOtdt5mt[ż (k^oSm$=v¯ ,@p^G?p鸧=@f=q䛑,\zc\sNIQz®x9U,CXF '7bX)a>feT̊VSf(_եPPP JgyljU8(]:= ʕ-VSʅPDUt6k[媧D)5%4,5x^XHE Y JgΦY=>-LOk=\7fEFW4 8ʘ*5871QH5kr l+Cnvx-b)7xʸo@l"VV^mzlp59x[0Hi1MÓfEK32E_=~ ejx^;B3!#cRE cUξ*"NK~JW_'Mc`b=/Zӈ| $Bsʼn)36c_U| DR{ے z'<6gP;8 ))j} VU:+Wqda$4J楎[+8OݸoKvIm^YYNv&pW8lgE+d[|k-ƌuqňXÔ&(n)W)o<%&D ,B۠#hХG)#i Ml.R#:w{)ox!#`lT_꒮P '*&iB&±ǨrZ&ǷȔƇ͹_p44E#RKO=[AK-1jԔ ^ƈ|$jӞh|2&_Q+&Qp![A}4"dtӃ3pTNEtnuh-AKVv]?vP vf4p 8vr6+&JG)7Cz(?{ȍ!_pm00aqw`gseA?3xl$A+d%nfmuW"YEWH+4- W?v7P 1|?k¼^ X[üۏ{#ݜ`7j%^ػWX6Gta]O&>{|/>4_ypV;0_p)_\΋ͫmx~zɦnϝ(DXo1U$3۔T{MѣAm Rt k9vH7{)]Lʇwz siU~k-ŪI!siu̻6mO?eV6-;{vrVtDFEZ|Tz1Hb:_ڹN@>Ǭ:+qq([fRow0WW_hV] p`4Fr/so=ipF0PA֔?pH ãaXzۄf쾰l+ eV,"ۘyk?zKDm\w$v,xżj [W_Lt!E:Ϙ,tVd0Lni˹J*}h-0B2HJ'\'<;䥙wr(s[X*Kf(@!2.S!P 6.u  y_F4İ]Оq j͑LQRűh):C֚"5{ 0nm7i@A L Ma 8icKע JsE+S"^VHf]%+L_\neLUd8k8iSl)QJ\(lA[hrpPv齍t-n'7ޟ|1nS-Κg4_zo8J0"bdNc; #AA3D"`3o^ !R:w߷eڶ=hXuub5-Y,J¦Ufjow6]2'Xl¼/Wf1g?ە|5_Y7WW7+'[1;!9T0tXѮk^?_Lº;e^ M6%UPd* žH&ËfD9+4zXMTv*f[fn8f6/QO0oxqtIyl2{|$7 [8V//_2^@wcҐ= 6GLJ׬/^%|n;;A҃Κ旛*TM]U[՝>xiz8BW={Wﯟ]b$ѨIz.ګf٬KMSu.@WRDRv1WfHJ1LpYHxHPUWpr)뜈H$'ZR:E2YBxG[tRiKkƧ#cnޑqRq!D A(Ɠ00%:LD,`8Lq1hc]_jř#9Л]l7duq՟J[jT0n!+֦5?}ǖc~xuo͏8\Mfkuq4.ݲHSx\XF/a!\CHs?gmJ<ެUsYwnQ6M'{7[,>vuVHshRXwnm#[P*kȐF(*)MJ,ћ=C$#K !iG1-ʐh:ʰv 4ʈ_˄+'B1Cj:FS!g1H\pr$zs1BX3gȌ eR, @Nv'sKcSprL.^0w+O m0x~dyrkq:foeq䥁wLW B%@ߡ<`x~߾Z(Y7-RD tk,ߣThfE IԲ b`fRŨ +^! C@; &i_WZXz4pDET*h.w}L"xǓ ;=_RS*Bu%"JzBܽ ҊHAƫ +/Oo )M!þk1RU^k!-XK>'~i<%_ EJii+AҘ~)D!4rP cOPTgX(+/ L{#A 8qX⾬~D|BW_րkĝ[ }BW~$r«\eJrGFC6R*Y# VSJ%JptFX Q&5ByEaP9E%(/1D0.(qO)8'gu.VG1% RJJYSAsY y?VEVݽENAԬa@M&!.Kr SEkMjUcT,a$\VPQt1Jj)CB̔M7f^_[@t_??@&L #>U5 z"Z88}0KW˸FkL&I$@$m?rٓK!dcP կ|c2c!8ΰTJ9=s5R0BkȺ@]Ds:pR-] #e]cڞ5Ji ]̔y3L}χvt$1aI{*HJؔ V"iN^xy(i)>t[fVzPo4Ge8q&n˄"4t euE]!&O}L+%&tă+f|@X_g~finna~cgoKnЬ1 |۬0˕A#duZ;k]\|Bgfr\+VuM8՜%k) +xrVc QԤÎm fE5dP/y6j -kP>]ҨyަKY||ύf,^{Wt1uPR'N OJ &gg.w)]sZHNtYKkEeQ9ƥsY0BKyn(0%ckm m,:/llB"'pperplPjo;SyWg8IG;tsޯ40l0Rx4YlyG$Jx䷫_UTLPKƯTΟo<2Ѭ^YPY؎cχ'L%%~4_V \/ݿt;^_|y ZbqX;,Mk(kJ1&/SDM3XX#W؆-pr՘+JzPaK3hVmz LӠ3`$++g `2dN)D|' vpB!ux.QN. 
K;o$(H.c) TƀEF +A_5Xi4Ġ&yfpR(F&Bpv3Tqe$cEIMMk*bc VՑ1r]!|=?r;;*ztS x K63HYQDWhS/0MωHSkxH%e-T !7Q"986/E.f>(Y 5/ ,F*(?TH#ؔp5V$ZtX=ރcH'uzJLldnY{'3BD8?q.(cNT|u@SCSvz<rG)" ;=!FCT4斔IZ!L,?E{r;l\ h1)b+ 7.,zy!B! R ܰ9h'F!2tNü38Zݭ7^>2HĨQOCoqJu-t Ou9ۨ8B!uR[ Kg; m%`4Sw0Ia=T8< QYFEBFHDaN@'8$ d`y  p~5zl]b^.c,Xel;{{b,_䥙k-"? WR :CL&T^fpU '; bۼ`]Q0c%+*5BKbwd9e)))܌ET=:wk[9MY6yǀG 05Bn;fw$na^'{柌M/]>n;MFWFIy6庚緳U0{Du 4[ǥMj=!yO{CZL+Y1%dZPakX0hYv''U͎.< O8_WY0T x ]fx +ȹ5x?V7k5nszػJ#F0rFdnįY$E9:@\)/ɮ]_{h#2A[+aPpkEڲtsZb֕kLUK^bBHȔYajvfF|ủYҰ|_%|#!c9)޿oflHhMP`f eW?,5;]+3{5MmAe%|_?V\]:_'ieb߱-^ݛe#%,Wӱ:ЩT Aޖz9ߟqEd߉GѢS*&G!Sٳ1j{OMӛ1J ,Ww|{"NUK?Uf$- r|4dyt#&Ag^g*hnPXXa0--ZuuxZ$)LsSiKOf,װ~2wC&,cq;/2Bpȋ2̶WДcIvi/8C b -rncXP}ZB0.a8puerqatfiŲB )$PQeOG=n1r=u%$oB Z!Rji<̔iԊ"X1pM"6$9&L u¶ϨS"x4a8#"I`>1?ٔlqgFd>YY$gdA8q49SII7 D^iJհ`4704'֥2xӂ怜U(FL0n$Z5(*Y4WsYmaݱYq$6#ς0Q&t&GT|n<= xǹLEG_~ Ӆ=LlDkftLUxhHfZz uTSӽdD76kgiyy:}С6Z` $HȽ 4^~(R>vG9Esc|֬~yh L$r<ɭ:[@AO1FOt %r:os' K&ƺ0\+wjA>[ EhJy}_."Hr;FrȻA)Zks95ȿ W%aESrB'w*CD/gzܸ_"{Vcū?^Kv.^Il }F3jibwKeıi_B 5p3toF&9fm|+;N2}NFs-ԪRi%'UGCMKٓ P߭D\yj#$dB:$LB 4|K@.OC&p؋"ZqS%쿃_Ud|,]H4~]Y,g?ߧ?RYba%f8 2a'>N}\7tr7 E81QjRF$=2N%^ETph/4R^H0D5S}E=&K_bɂAYUϫOB_t/aNF9]C[+͸SFg6qF&)BNCd9Ϻw s$}H$qJ$q= @JDzxdQ2B, 8؊}}ɝlZ"Z%Xp|PI nG$8ZSh9ue cPuJYe(j8 .0*M4HzAU7/*җ-H]Q;9 P3F7HԢ v?@@BOA2 .5w̽<_9B)F5MuF\*D?z>vj^9hG|h"~{E>!NNNE/1W(#@OU2RYIJc$mf;2*WN YIA_ pp7>9kI{|PT^h7-egtF9wD_B} ]skZjϕsۚLZq%'w7]V&YY) &gۉHMgco{qj}LȞnL6R#fqb^F7'׆v>#\4NFw_g%C Rnp&t*IQY)P  %0eNeէs؂[\`r3mdE#GOT'%"pYI>dg1-IS8Uc*qxxJ _EWzCDe"3:E*ujn>&~avHzс8I`V C]B8PQG@BgXfOV̤\^Q&%Ȋ/]"5MѦwϟOSY=x&GP*6PtW:zsꋝWWJfUV.|rs;|PWxeFHGk~sFdBAǓiG>$W@Hf=' w&+Nߖ4>ZdV}P6 H?B WЊ[U{[Qj#ϾpR*p9Eq0$ux$:Rpfr&!Z+ƣ`hZ"$F=*W k97[Ji=Q;ܜ[9udiBVZ:p8%t%}\GqqhJ0'7*VQXJ[ J;%FL:)P2P#dB?5JKc"Ş/\s_z~diuw&}<; )@rr(ʣ5Zo`u#;46 T&OR)8TK*I9bTx '>^U|~X&.~yIs ?~x iw?=0[ 'xg|09(^R/]KT| x^|s?^WεͿ_)ܭ`O6ц<ʹƸ7oBtn.w.tcà\jAz@YhHZB I@w*4WCP7pWD:TxEGS @(taSI9@5ɅHަA%i!Zۯτ -C-ww\?OF8+ndd0BVLYJpt+3ZKp^)/ :K_Tt?{+#ARU+-Jt禮77kD76m~1`IA)AѴz86 j2hc7ʔV3M,<)`m.B y@+ʉҁE }##fLѠ^(L_no^3E~}Kڥǿ( mmcV0훉e!L(Gl즔 nJLjd7=*%~k[qԐRb7]vGK|Kpރy> yĘ*_l zO 9黻o/w- ?ܣ3|/y觬 =no꫋?Nno],G$^Emʭ'-_*eRRK!.ˋ ܹŀ>R|◉?'B0H`h{a.ه`^k.'ZBʲg`3عF[rQw k}m<9: ai}LG` R9)HhQAaGU ;&"ʗP{=^k;3/X3 Z%,׆~:!*z",HEe ኛCT$ڔCd rqZ㬗WROK\Su" ?xAWTK#8YC?Ղ [|SqR TKeHGDEs?ߏkwL9%k}/"jlfQsi`gR~m _x/Ɋ#X/ YԁMфP1N2 m u. \xdb033ޣ{,d.ŝKͣG._ձ}8-37J F qkfz$*f>dϯgu&::eDfqd1H7mQ+pճ3Fnlҍj@`9ـkx%\Y:f(1?;mqÅ-8vF.9o-T"K%G>PR> wq辚|#ˁֲ<+{iRMֳ`+gwzJ;/`ZE/RԼ7p5t*ߛ7|>!֬c|; \AepmZH*:׿߼a]9Xނ!Prd)I*"`pޛ\zňjO3Bc{Vg >쿽_9G[sƨ-7k1B0#rwg`Y.=8"V JM F[ۦp뼚F n)2G!yO|hAaܖQڡU@rE{+V4 #hkWdqԶ!]4uY)Wh&G.Kh9z DNj :J(k-#%w]yǭݝZAAJbȊdEJ1.z#+F1"W9q(0- tP1Ȩ*Oq`#2V* WڳV8O5h^KhEuqi{]#J^[>VxjMLMlA^9njU4=ʢ'No.}48\ЧyJ͓ 5C[1]~ޣo:2UX|P%iB.͏gQkJj qHKe3k^O߂OG8'U| TC52Bx,ub5TYBF̐eB3T;Y:Z!.ՓNd@i+ﳻk8C$\C$v@@ ^G+E0cx;:g+Q^@cVSgV`EhOH\UeAvapC cuon-I xǧ*ڈ73;}@Θ3HHhnϑC#L=]eIea3)ޕ-k"M⿟ԟFkO3MM@3 [Yx1]q̙w$Ť w*qΕB?^fwׯ^atE0\ߑx-!dUSQV7u&y0/\:ܟ2qE" !E8zSseͼd\濛9fe]Ī oS8gO9L-M;g"r2/?3g'< 3<X , ,FK)F,לVj&UU<~BHrs=$ 퍖2FD OC @n/gxۋzF [a`8>uʰb6tw՞Q}zAl.߯?_ .B`7y<&W׍wISMdG%W>Hw+}r Pv}`_qm&` R):B<CS> xER,,3 d6Ro|!t^GS-(Ȗ?U+vrItX"P1Ȇߌw48T3 L#1zǘ`I{ωN͠3h'`nS4B?R R]#`Bc)!!=uW 4nSN;rKB݊'\D)C;ʯ:^mcGwzG./ 11^nzvqkz̓5Z>?͂ />R4EvOOH(Ęɴǣ3k0.V9d0/`CqA!C7@btJ'F EQN>QQ 鯽qH&\ L䱦y쥠*)TR[\! 
u֖pA52 B̀$-g#Jhvަm]'İX  B2jgsA`3L2aϸcE,Ly°XKLކLj]QhjC(p=LRJon{am !Ez&Ӡ82m6[jt^vW4H80eL*yDH~:aV&&+H,/ġ]}g|katfQIȉ՘0cs >X2?jo5i&8PN19uKC'A1!`C|0@[Y3r3gz2;pjD?-^ۥgw$cX1ݽsETKpiDa|Uc9?czFT,qj/Sub<A8Q^RJpjbpNUfU(,7NP+ ,?P=![D2)pJ*.LiVGUH# &bU&8L `pI{d>[{gK7V-#4U6,]FJ@*,VnX[vQTΊ)(O5I [ᬭz1RCŖSu ^gv6M1 _bv%$ OLRI0% `hbت8WY2{ц $lb-İzZР`JNP/V:`+xtS(a”}@ ;dٗFPஉYr<ф+U{Ro6nqӆfaKAvJ[bm; wLfTdB;DaHKx! Njkrj .ͿK" 4WܞǺoO3 çŭ0 ,T(Lot] عş́h;EsaL!9\ȳ%`GGGa'#)fabt;[1Ai=хp5*r,rJPՋ.t×BW%%}Fِ$x)h8 qGt[uPL(>A1QPHNkxE5äqv$P-|GVߓmyW=tkj$3e bC2HSW W몧E6H_IT#ռW + [[%+sw/"+g/f‰$䷰^YW4U JY1-"24 :2nQ, >!2)Oә n8,5D9k OAUեU39fB{3I[Ԯ$DtLLDp'Iw'qEY3!X2o_E31_5 o]Q}v6@gcľ gx?1W LJi.}SUi9cDQNN#H'W Uw{i& a,O<͆Wɪ:D_on5d- qiX:8dҺK:.Z?ofPr̖hɁZKJ{&iΑJ)ÿ'sq>xFx/+ XS0Ķ.ۣi}|綯olˏ]q-L4E&5(k'.I#gUMς@HtVŸ #ɳ[b$R#SΘaTf4TVe'hXRxZMGPJ+ t'*jK< - p[|DtM $YmI]PX8 zy(FOU[4'R h޷?,@/ Ud>|aR ͍T 'a2EcR87龁$;1ilՖq IwbҘ7"J=B wfszӌuNoஜxțÞpRDEfc3Yc FӬ 7vS-4,K Mu8NaA@QyPF4zMСEaif ^b"4&OƛIpcӰ00>m&  F:X3{?sbbIe)/}QL,Pϡ Tjl|cNdT8hXWK)|c)eIH2/ILxǙ>p i\ DDž$7Ǧb]L`PPWHaHiڟ^=z,ա!{r(aO(ӈ?sq&L8's4d Tr75p:KH~h_,M̹~!ZNG\\9H}fר9)=yڕu76|7"[@x\5q'IMw("B?0%!IC.1gOe&:q4p6^mMJM랸NIWmPpTy_C +7xs[5 B(9Z}pJYR;f'?0we# *ͤ^x3[-[yo"GNqCk SRjY,3;MqIPbȊd |.@ZU{8H̀lrUũ"Jj2a1o* c3R"(-jv6a *eϵ!V4tXdSvf*~2FzKK~J#I 331CiEN<SJ犌ҕt+gRm񰯴C Q9,kWJ)={Ou :9w"Ҩ:4Gl)6<f~y<:_.|}}^\ߛ%lxs?ofna¿㭽X_о~,ۗJE[+^LJۇܯ7dLfӨgʮm\*%][߫#3n7'ofhrJzsxwO_Q͗r ޏWu*sワk%N8)(iDII"n4gJv\ů rm$Sy[s.v A5Gtv1`y n'ڐ+ѳeʚܜy n`l(A%BX fIԉ;;37k ]8As򂻠H fWD%^cLWR zE a/3?u#{),]ۥz@W+}S@P%i&Ȕ pxJQos8Z\ynY` `~7Ca ^ >~S.8x,r(rn֠-Sb>NLVt>{ɾvs J-5;4×x0'W? w7;oy~&&/M\2B!]d H7c|ύ$9R' TN$8q c PTjPpI1:XID+z c@R5)!XDsn)I fR$6,vB,1!ESN%\j%P(SEZ{\*X`|ONWi庾ߞsĦp?&.Sh7Gz64!zD!HJFe }n;n7-'}W[_gv @,(ʷ#~ǻQ:/LoϏw0f/op~fAۛ,ͱXmB8ƻO^hy-<<ح?Rd5;Uq2Ei#Cb_qlfC#"o|!d#h ]C\.B; ;g?Y\N~Ih6CF{m- OVI`Hp-;*B+璑r)s Y;2hRb@VL`8h89 r&dz{T]B.0 HIP+$*S.%&kRBɹᩁٿ8щFAR` PPhc=Ó b:@*QF$":޹;O4I iIh]J q$Hk U u&'$e"I `@3H]W\&NQu&]U+;Z'r qIcmh}<5{6JMReJJ'zlfuw4fngv/dhNԘϵxu1Cof>ߞO{ljT^2h9  Ѻpoy ?h>2ma ?zCΜ?,ȼeC" ?d1 bp0+7Tœh7 e햇 $wȺ6TvλY)~7ROloP{ f[:_cK)2 9`,s.??v S תAuIEP~m3I2qSѝv7d"48S3S iBЌŪeLRtγ{p$ 4;Я84~޺Ĩ N7Yd?b9Ʈ];eOڭ+k`0ZjWfܟSqWስl$X.+Z+F2E]cFsUznM1﨣sOL5vkCB\DȔ? P'v 4e҉Qگ.ȰVC:/_xVxQE;*kwJ9IڤjVf\4>׈T!b|J)bדxEqCޘ*}dzl[ ݻ ֈֽ+|7Y|ipBȧJtrupp> CQ8E~OuXzMIBdjHdY(V2 pZ\h.<5ppc@ANnP!y7ɽQ\3^V@9!=dzkਾ0BwRxp\{Ctg)iy7(y射r8+io=Һ`PvVmt%0A(b g^'%*dl`}9K1X7I4iw r+}=H%`zO"L7H l1z`]aUwB82Dlqu/l ϔ oECd02H)V&/| O! o@ gCDCN( !6$8% H(A]աlo%P}uC-ivо9(n x^L WnS!k{1㸲!`PUzM~&ٺVa|f_]UTLgi=5ULF',G1D[@q4܆=J5| BVI3jZu=ZesH;-wYz1WKGէԟto ˖m[rc$m;p;8 9nG;wjW)mPPRckQuwg4fCm$呶qK;tz_̄mZ;͟NS5U(@Dzjx֝P`ggFGBZlZBP(t촮APp*spj-nVnQAEBK pPRɏʿ-N1Cv̳|2tγ{#96iVrC=bSՇvaۭ Uޫ3&MD{6B7AY@T4b\(=kl9LicG'@a8:U:aB%< r€Dh)Mv-$8Mv !2*=q*l4Ti2n5!X1SH ICTXNt5 _z>\tG!"Wn<hղ ڰrW3@;*wɹ)0~۪vK A׭I=טּ&DZ $;?i `Zn"Y܀}(UO(ç/֠A`7jZ`. 
Feb 03 07:10:19 crc systemd[1]: Starting Kubernetes Kubelet... Feb 03 07:10:19 crc restorecon[4697]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Feb 03 07:10:19 crc
restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 03 07:10:19 crc restorecon[4697]: 
/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 03 07:10:19 crc restorecon[4697]: 
/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Feb 03 
07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Feb 03 07:10:19 crc restorecon[4697]: 
/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c22 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 03 07:10:19 crc restorecon[4697]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 03 07:10:19 crc restorecon[4697]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c84,c419 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c380,c909 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 03 07:10:19 crc restorecon[4697]: 
/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Feb 03 07:10:19 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]:
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 
07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 03 07:10:20 crc 
restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 
03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c466,c972 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c318,c553 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc 
restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc 
restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 03 07:10:20 crc restorecon[4697]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 03 07:10:20 crc restorecon[4697]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 03 07:10:20 crc restorecon[4697]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Feb 03 07:10:21 crc kubenswrapper[4708]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Feb 03 07:10:21 crc kubenswrapper[4708]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Feb 03 07:10:21 crc kubenswrapper[4708]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Feb 03 07:10:21 crc kubenswrapper[4708]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Feb 03 07:10:21 crc kubenswrapper[4708]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Feb 03 07:10:21 crc kubenswrapper[4708]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.750698 4708 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.757861 4708 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.757883 4708 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.757890 4708 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.757895 4708 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.757900 4708 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.757906 4708 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.757911 4708 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.757917 4708 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.757922 4708 feature_gate.go:330] unrecognized feature gate: SignatureStores Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.757938 4708 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.757943 4708 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.757948 4708 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.757953 4708 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.757957 4708 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.757962 4708 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.757968 4708 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.757973 4708 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.757978 4708 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.757986 4708 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.757993 4708 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.757999 4708 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758005 4708 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758010 4708 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758016 4708 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758022 4708 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758028 4708 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758033 4708 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758038 4708 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758043 4708 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758048 4708 feature_gate.go:330] unrecognized feature gate: InsightsConfig Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758054 4708 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758060 4708 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758065 4708 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758070 4708 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758075 4708 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758080 4708 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758086 4708 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758091 4708 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758096 4708 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758100 4708 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758105 4708 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758110 4708 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758115 4708 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758119 4708 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Feb 03 07:10:21 crc 
kubenswrapper[4708]: W0203 07:10:21.758124 4708 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758129 4708 feature_gate.go:330] unrecognized feature gate: PlatformOperators Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758133 4708 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758138 4708 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758145 4708 feature_gate.go:330] unrecognized feature gate: NewOLM Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758151 4708 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758157 4708 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758163 4708 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758169 4708 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758174 4708 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758179 4708 feature_gate.go:330] unrecognized feature gate: GatewayAPI Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758183 4708 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758188 4708 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758193 4708 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758197 4708 feature_gate.go:330] unrecognized feature gate: Example Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758203 4708 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758208 4708 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758213 4708 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758217 4708 feature_gate.go:330] unrecognized feature gate: PinnedImages Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758223 4708 feature_gate.go:330] unrecognized feature gate: OVNObservability Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758228 4708 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758233 4708 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758238 4708 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758243 4708 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758247 4708 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758253 4708 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. 
It will be removed in a future release. Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.758260 4708 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761034 4708 flags.go:64] FLAG: --address="0.0.0.0" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761057 4708 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761067 4708 flags.go:64] FLAG: --anonymous-auth="true" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761075 4708 flags.go:64] FLAG: --application-metrics-count-limit="100" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761082 4708 flags.go:64] FLAG: --authentication-token-webhook="false" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761089 4708 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761098 4708 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761106 4708 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761112 4708 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761118 4708 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761124 4708 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761131 4708 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761136 4708 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761142 4708 flags.go:64] FLAG: --cgroup-root="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761148 4708 flags.go:64] FLAG: --cgroups-per-qos="true" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761154 4708 flags.go:64] FLAG: --client-ca-file="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761159 4708 flags.go:64] FLAG: --cloud-config="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761164 4708 flags.go:64] FLAG: --cloud-provider="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761169 4708 flags.go:64] FLAG: --cluster-dns="[]" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761179 4708 flags.go:64] FLAG: --cluster-domain="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761185 4708 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761191 4708 flags.go:64] FLAG: --config-dir="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761197 4708 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761203 4708 flags.go:64] FLAG: --container-log-max-files="5" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761210 4708 flags.go:64] FLAG: --container-log-max-size="10Mi" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761216 4708 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761222 4708 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761228 4708 flags.go:64] FLAG: 
--containerd-namespace="k8s.io" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761234 4708 flags.go:64] FLAG: --contention-profiling="false" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761239 4708 flags.go:64] FLAG: --cpu-cfs-quota="true" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761245 4708 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761251 4708 flags.go:64] FLAG: --cpu-manager-policy="none" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761256 4708 flags.go:64] FLAG: --cpu-manager-policy-options="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761263 4708 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761269 4708 flags.go:64] FLAG: --enable-controller-attach-detach="true" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761274 4708 flags.go:64] FLAG: --enable-debugging-handlers="true" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761280 4708 flags.go:64] FLAG: --enable-load-reader="false" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761286 4708 flags.go:64] FLAG: --enable-server="true" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761291 4708 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761299 4708 flags.go:64] FLAG: --event-burst="100" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761305 4708 flags.go:64] FLAG: --event-qps="50" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761310 4708 flags.go:64] FLAG: --event-storage-age-limit="default=0" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761316 4708 flags.go:64] FLAG: --event-storage-event-limit="default=0" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761321 4708 flags.go:64] FLAG: --eviction-hard="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761328 4708 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761334 4708 flags.go:64] FLAG: --eviction-minimum-reclaim="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761339 4708 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761345 4708 flags.go:64] FLAG: --eviction-soft="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761351 4708 flags.go:64] FLAG: --eviction-soft-grace-period="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761356 4708 flags.go:64] FLAG: --exit-on-lock-contention="false" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761362 4708 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761368 4708 flags.go:64] FLAG: --experimental-mounter-path="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761374 4708 flags.go:64] FLAG: --fail-cgroupv1="false" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761380 4708 flags.go:64] FLAG: --fail-swap-on="true" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761386 4708 flags.go:64] FLAG: --feature-gates="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761393 4708 flags.go:64] FLAG: --file-check-frequency="20s" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761399 4708 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761405 4708 flags.go:64] FLAG: 
--hairpin-mode="promiscuous-bridge" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761411 4708 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761416 4708 flags.go:64] FLAG: --healthz-port="10248" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761422 4708 flags.go:64] FLAG: --help="false" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761428 4708 flags.go:64] FLAG: --hostname-override="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761433 4708 flags.go:64] FLAG: --housekeeping-interval="10s" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761439 4708 flags.go:64] FLAG: --http-check-frequency="20s" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761445 4708 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761450 4708 flags.go:64] FLAG: --image-credential-provider-config="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761455 4708 flags.go:64] FLAG: --image-gc-high-threshold="85" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761461 4708 flags.go:64] FLAG: --image-gc-low-threshold="80" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761467 4708 flags.go:64] FLAG: --image-service-endpoint="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761472 4708 flags.go:64] FLAG: --kernel-memcg-notification="false" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761477 4708 flags.go:64] FLAG: --kube-api-burst="100" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761483 4708 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761489 4708 flags.go:64] FLAG: --kube-api-qps="50" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761496 4708 flags.go:64] FLAG: --kube-reserved="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761501 4708 flags.go:64] FLAG: --kube-reserved-cgroup="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761507 4708 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761512 4708 flags.go:64] FLAG: --kubelet-cgroups="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761518 4708 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761524 4708 flags.go:64] FLAG: --lock-file="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761530 4708 flags.go:64] FLAG: --log-cadvisor-usage="false" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761535 4708 flags.go:64] FLAG: --log-flush-frequency="5s" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761541 4708 flags.go:64] FLAG: --log-json-info-buffer-size="0" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761550 4708 flags.go:64] FLAG: --log-json-split-stream="false" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761556 4708 flags.go:64] FLAG: --log-text-info-buffer-size="0" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761562 4708 flags.go:64] FLAG: --log-text-split-stream="false" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761567 4708 flags.go:64] FLAG: --logging-format="text" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761573 4708 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761579 4708 flags.go:64] FLAG: 
--make-iptables-util-chains="true" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761585 4708 flags.go:64] FLAG: --manifest-url="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761590 4708 flags.go:64] FLAG: --manifest-url-header="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761597 4708 flags.go:64] FLAG: --max-housekeeping-interval="15s" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761603 4708 flags.go:64] FLAG: --max-open-files="1000000" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761610 4708 flags.go:64] FLAG: --max-pods="110" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761616 4708 flags.go:64] FLAG: --maximum-dead-containers="-1" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761621 4708 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761627 4708 flags.go:64] FLAG: --memory-manager-policy="None" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761632 4708 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761638 4708 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761644 4708 flags.go:64] FLAG: --node-ip="192.168.126.11" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761650 4708 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761665 4708 flags.go:64] FLAG: --node-status-max-images="50" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761671 4708 flags.go:64] FLAG: --node-status-update-frequency="10s" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761678 4708 flags.go:64] FLAG: --oom-score-adj="-999" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761685 4708 flags.go:64] FLAG: --pod-cidr="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761691 4708 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761708 4708 flags.go:64] FLAG: --pod-manifest-path="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761715 4708 flags.go:64] FLAG: --pod-max-pids="-1" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761722 4708 flags.go:64] FLAG: --pods-per-core="0" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761729 4708 flags.go:64] FLAG: --port="10250" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761743 4708 flags.go:64] FLAG: --protect-kernel-defaults="false" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761750 4708 flags.go:64] FLAG: --provider-id="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761757 4708 flags.go:64] FLAG: --qos-reserved="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761763 4708 flags.go:64] FLAG: --read-only-port="10255" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761770 4708 flags.go:64] FLAG: --register-node="true" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761778 4708 flags.go:64] FLAG: --register-schedulable="true" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761785 4708 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761828 4708 flags.go:64] FLAG: 
--registry-burst="10" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761835 4708 flags.go:64] FLAG: --registry-qps="5" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761842 4708 flags.go:64] FLAG: --reserved-cpus="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761848 4708 flags.go:64] FLAG: --reserved-memory="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761855 4708 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761861 4708 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761867 4708 flags.go:64] FLAG: --rotate-certificates="false" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761872 4708 flags.go:64] FLAG: --rotate-server-certificates="false" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761878 4708 flags.go:64] FLAG: --runonce="false" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761883 4708 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761890 4708 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761896 4708 flags.go:64] FLAG: --seccomp-default="false" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761903 4708 flags.go:64] FLAG: --serialize-image-pulls="true" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761910 4708 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761917 4708 flags.go:64] FLAG: --storage-driver-db="cadvisor" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761923 4708 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761931 4708 flags.go:64] FLAG: --storage-driver-password="root" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761937 4708 flags.go:64] FLAG: --storage-driver-secure="false" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761944 4708 flags.go:64] FLAG: --storage-driver-table="stats" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761951 4708 flags.go:64] FLAG: --storage-driver-user="root" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761957 4708 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761968 4708 flags.go:64] FLAG: --sync-frequency="1m0s" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761975 4708 flags.go:64] FLAG: --system-cgroups="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761982 4708 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.761993 4708 flags.go:64] FLAG: --system-reserved-cgroup="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.762000 4708 flags.go:64] FLAG: --tls-cert-file="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.762006 4708 flags.go:64] FLAG: --tls-cipher-suites="[]" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.762016 4708 flags.go:64] FLAG: --tls-min-version="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.762022 4708 flags.go:64] FLAG: --tls-private-key-file="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.762030 4708 flags.go:64] FLAG: --topology-manager-policy="none" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.762037 4708 flags.go:64] FLAG: --topology-manager-policy-options="" 
Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.762043 4708 flags.go:64] FLAG: --topology-manager-scope="container" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.762051 4708 flags.go:64] FLAG: --v="2" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.762060 4708 flags.go:64] FLAG: --version="false" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.762068 4708 flags.go:64] FLAG: --vmodule="" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.762075 4708 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.762081 4708 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762207 4708 feature_gate.go:330] unrecognized feature gate: PlatformOperators Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762214 4708 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762220 4708 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762225 4708 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762230 4708 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762236 4708 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762241 4708 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762246 4708 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762251 4708 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762256 4708 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762262 4708 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762267 4708 feature_gate.go:330] unrecognized feature gate: GatewayAPI Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762272 4708 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762277 4708 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762284 4708 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762290 4708 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762299 4708 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762304 4708 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762309 4708 feature_gate.go:330] unrecognized feature gate: NewOLM Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762313 4708 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762319 4708 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762323 4708 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762328 4708 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762335 4708 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762340 4708 feature_gate.go:330] unrecognized feature gate: SignatureStores Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762345 4708 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762350 4708 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762355 4708 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762361 4708 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762366 4708 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762371 4708 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762376 4708 feature_gate.go:330] unrecognized feature gate: InsightsConfig Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762381 4708 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762386 4708 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762391 4708 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762395 4708 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762400 4708 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762405 4708 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762409 4708 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762414 4708 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 
07:10:21.762419 4708 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762424 4708 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762429 4708 feature_gate.go:330] unrecognized feature gate: OVNObservability Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762434 4708 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762439 4708 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762443 4708 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762449 4708 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762455 4708 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762463 4708 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762468 4708 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762473 4708 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762478 4708 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762484 4708 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762490 4708 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762495 4708 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762500 4708 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762505 4708 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762510 4708 feature_gate.go:330] unrecognized feature gate: Example Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762514 4708 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762519 4708 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762524 4708 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762529 4708 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762535 4708 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762541 4708 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762553 4708 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762564 4708 feature_gate.go:330] unrecognized feature gate: 
SetEIPForNLBIngressController Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762570 4708 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762578 4708 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762586 4708 feature_gate.go:330] unrecognized feature gate: PinnedImages Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762634 4708 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.762642 4708 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.763532 4708 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.774505 4708 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.774533 4708 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774610 4708 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774619 4708 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774624 4708 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774628 4708 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774632 4708 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774637 4708 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774640 4708 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774645 4708 feature_gate.go:330] unrecognized feature gate: InsightsConfig Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774648 4708 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774652 4708 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774656 4708 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774659 4708 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774663 4708 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774667 4708 feature_gate.go:330] unrecognized feature gate: 
PlatformOperators Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774672 4708 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774676 4708 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774680 4708 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774684 4708 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774687 4708 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774691 4708 feature_gate.go:330] unrecognized feature gate: OVNObservability Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774695 4708 feature_gate.go:330] unrecognized feature gate: SignatureStores Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774698 4708 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774702 4708 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774705 4708 feature_gate.go:330] unrecognized feature gate: Example Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774709 4708 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774712 4708 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774715 4708 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774719 4708 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774722 4708 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774726 4708 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774729 4708 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774733 4708 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774736 4708 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774741 4708 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774746 4708 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774749 4708 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774753 4708 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774756 4708 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774760 4708 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774763 4708 feature_gate.go:330] unrecognized feature gate: 
ClusterAPIInstall Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774767 4708 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774771 4708 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774774 4708 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774778 4708 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774782 4708 feature_gate.go:330] unrecognized feature gate: PinnedImages Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774785 4708 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774789 4708 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774804 4708 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774808 4708 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774812 4708 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774815 4708 feature_gate.go:330] unrecognized feature gate: GatewayAPI Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774822 4708 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774825 4708 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774829 4708 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774832 4708 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774836 4708 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774840 4708 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774843 4708 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774847 4708 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774850 4708 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774854 4708 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774857 4708 feature_gate.go:330] unrecognized feature gate: NewOLM Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774862 4708 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774868 4708 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774872 4708 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774877 4708 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774882 4708 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774886 4708 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774889 4708 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774894 4708 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.774900 4708 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.774907 4708 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775041 4708 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775053 4708 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775058 4708 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775063 4708 feature_gate.go:330] unrecognized feature gate: Example Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775068 4708 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775072 4708 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775076 4708 feature_gate.go:330] unrecognized feature gate: InsightsConfig Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775080 4708 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775085 4708 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775090 4708 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775095 4708 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775099 4708 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775103 4708 feature_gate.go:330] unrecognized feature gate: SignatureStores Feb 03 07:10:21 crc 
kubenswrapper[4708]: W0203 07:10:21.775108 4708 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775114 4708 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775118 4708 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775123 4708 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775128 4708 feature_gate.go:330] unrecognized feature gate: GatewayAPI Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775132 4708 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775136 4708 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775142 4708 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775147 4708 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775152 4708 feature_gate.go:330] unrecognized feature gate: PlatformOperators Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775156 4708 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775161 4708 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775167 4708 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775173 4708 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775178 4708 feature_gate.go:330] unrecognized feature gate: OVNObservability Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775183 4708 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775188 4708 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775193 4708 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775200 4708 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775204 4708 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775208 4708 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775214 4708 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775218 4708 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775222 4708 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775225 4708 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775229 4708 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775232 4708 feature_gate.go:330] unrecognized feature gate: NewOLM Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775236 4708 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775240 4708 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775243 4708 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775246 4708 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775251 4708 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775254 4708 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775258 4708 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775261 4708 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775265 4708 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775268 4708 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775272 4708 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775277 4708 feature_gate.go:353] Setting 
GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775281 4708 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775285 4708 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775290 4708 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775294 4708 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775298 4708 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775302 4708 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775305 4708 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775309 4708 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775312 4708 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775316 4708 feature_gate.go:330] unrecognized feature gate: PinnedImages Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775320 4708 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775324 4708 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775327 4708 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775331 4708 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775334 4708 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775338 4708 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775341 4708 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775345 4708 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.775349 4708 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.775355 4708 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.775552 4708 server.go:940] "Client rotation is on, will bootstrap in background" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.781240 4708 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap 
necessary" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.781333 4708 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.782841 4708 server.go:997] "Starting client certificate rotation" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.782863 4708 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.783913 4708 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-22 09:08:14.056046917 +0000 UTC Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.784009 4708 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.817596 4708 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Feb 03 07:10:21 crc kubenswrapper[4708]: E0203 07:10:21.820872 4708 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.53:6443: connect: connection refused" logger="UnhandledError" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.822179 4708 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.872668 4708 log.go:25] "Validated CRI v1 runtime API" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.951848 4708 log.go:25] "Validated CRI v1 image API" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.955076 4708 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.960277 4708 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2026-02-03-07-06-15-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.960372 4708 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:41 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:42 fsType:tmpfs blockSize:0}] Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.986071 4708 manager.go:217] Machine: {Timestamp:2026-02-03 07:10:21.984246942 +0000 UTC m=+0.966193759 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2799998 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:d4343d62-2ace-40c7-95b1-99d083ef1c91 
BootID:05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:41 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:42 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:5c:98:9b Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:5c:98:9b Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:1b:f5:f3 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:4f:59:ee Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:e2:dc:27 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:8c:7f:e5 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:ca:1f:3a:0b:9a:07 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:d6:ba:b4:be:f2:08 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data 
Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.986381 4708 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.986931 4708 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.987453 4708 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.987646 4708 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.987683 4708 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Feb 03 
07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.987993 4708 topology_manager.go:138] "Creating topology manager with none policy" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.988005 4708 container_manager_linux.go:303] "Creating device plugin manager" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.988490 4708 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.988520 4708 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.988710 4708 state_mem.go:36] "Initialized new in-memory state store" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.989215 4708 server.go:1245] "Using root directory" path="/var/lib/kubelet" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.992531 4708 kubelet.go:418] "Attempting to sync node with API server" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.992557 4708 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.992607 4708 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.992626 4708 kubelet.go:324] "Adding apiserver pod source" Feb 03 07:10:21 crc kubenswrapper[4708]: I0203 07:10:21.992641 4708 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.998120 4708 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.53:6443: connect: connection refused Feb 03 07:10:21 crc kubenswrapper[4708]: E0203 07:10:21.998412 4708 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.53:6443: connect: connection refused" logger="UnhandledError" Feb 03 07:10:21 crc kubenswrapper[4708]: W0203 07:10:21.998218 4708 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.53:6443: connect: connection refused Feb 03 07:10:21 crc kubenswrapper[4708]: E0203 07:10:21.999058 4708 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.53:6443: connect: connection refused" logger="UnhandledError" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.000830 4708 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.002565 4708 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
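[editor's note] The HardEvictionThresholds in the Container Manager NodeConfig above mix absolute quantities (memory.available: 100Mi) with percentages (nodefs.available: 0.1, nodefs.inodesFree: 0.05, and so on). A percentage threshold only becomes concrete once resolved against the observed filesystem capacity. A small worked example, using the /dev/vda4 (/var) capacity and inode count from the fs.go lines earlier in this log; the variable names are illustrative, not kubelet API:

package main

import "fmt"

func main() {
	// Capacity of /dev/vda4 (mounted at /var), from the fs.go:134 line above.
	const nodefsCapacityBytes = 85292941312
	// Inode count for the same filesystem, from the Filesystems list above.
	const nodefsInodes = 41679680

	// Percentage thresholds from the HardEvictionThresholds logged above.
	availPct := 0.10      // nodefs.available: evict when free space < 10%
	inodesFreePct := 0.05 // nodefs.inodesFree: evict when free inodes < 5%

	availBytes := uint64(nodefsCapacityBytes * availPct)
	freeInodes := uint64(nodefsInodes * inodesFreePct)

	fmt.Printf("nodefs.available threshold: %d bytes (~%.1f GiB)\n",
		availBytes, float64(availBytes)/(1<<30))
	fmt.Printf("nodefs.inodesFree threshold: %d inodes\n", freeInodes)
}

So on this node, eviction triggers when /var has less than roughly 7.9 GiB free or fewer than about 2.08 million free inodes; memory.available stays the absolute 100Mi value and needs no such resolution.

[end editor's note]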
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.010152 4708 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.015521 4708 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.015571 4708 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.015583 4708 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.015591 4708 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.015602 4708 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.015609 4708 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.015617 4708 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.015628 4708 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.015636 4708 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.015654 4708 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.015688 4708 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.015699 4708 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.016817 4708 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.017405 4708 server.go:1280] "Started kubelet" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.017880 4708 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Feb 03 07:10:22 crc systemd[1]: Started Kubernetes Kubelet. 
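[editor's note] From this point on, the reflector, lease-controller, and certificate-manager errors all share one failure mode: "dial tcp 38.102.83.53:6443: connect: connection refused" against api-int.crc.testing. The kubelet has started and is listening on :10250, but the API server it bootstraps against is not yet accepting connections. A minimal standalone probe for that endpoint, as a sketch (host and port taken from the log; this is not part of kubelet):

package main

import (
	"fmt"
	"net"
	"time"
)

func main() {
	// api-int.crc.testing resolves to 38.102.83.53 in this log.
	addr := "api-int.crc.testing:6443"
	conn, err := net.DialTimeout("tcp", addr, 3*time.Second)
	if err != nil {
		// Reproduces the failure mode logged by the reflectors and the
		// certificate manager while the API server is still coming up.
		fmt.Printf("E dial %s: %v\n", addr, err)
		return
	}
	defer conn.Close()
	fmt.Printf("I dial %s: ok (local %s)\n", addr, conn.LocalAddr())
}

[end editor's note]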
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.019609 4708 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.021072 4708 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.024907 4708 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.53:6443: connect: connection refused Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.028621 4708 server.go:460] "Adding debug handlers to kubelet server" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.036279 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.036322 4708 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.036451 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 11:03:55.009081192 +0000 UTC Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.036983 4708 volume_manager.go:287] "The desired_state_of_world populator starts" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.037159 4708 volume_manager.go:289] "Starting Kubelet Volume Manager" Feb 03 07:10:22 crc kubenswrapper[4708]: E0203 07:10:22.037550 4708 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 03 07:10:22 crc kubenswrapper[4708]: E0203 07:10:22.038016 4708 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.53:6443: connect: connection refused" interval="200ms" Feb 03 07:10:22 crc kubenswrapper[4708]: W0203 07:10:22.038026 4708 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.53:6443: connect: connection refused Feb 03 07:10:22 crc kubenswrapper[4708]: E0203 07:10:22.038548 4708 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.53:6443: connect: connection refused" logger="UnhandledError" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.038078 4708 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.038665 4708 factory.go:55] Registering systemd factory Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.044871 4708 factory.go:221] Registration of the systemd container factory successfully Feb 03 07:10:22 crc kubenswrapper[4708]: E0203 07:10:22.039217 4708 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.53:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.1890aaf4d1553bbe default 0 0001-01-01 00:00:00 +0000 UTC map[] 
map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-03 07:10:22.01737107 +0000 UTC m=+0.999317877,LastTimestamp:2026-02-03 07:10:22.01737107 +0000 UTC m=+0.999317877,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.046359 4708 factory.go:153] Registering CRI-O factory Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.046400 4708 factory.go:221] Registration of the crio container factory successfully Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.047916 4708 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.048036 4708 factory.go:103] Registering Raw factory Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.048071 4708 manager.go:1196] Started watching for new ooms in manager Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050342 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050401 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050414 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050427 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050447 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050458 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050468 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Feb 03 07:10:22 crc 
kubenswrapper[4708]: I0203 07:10:22.050480 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050493 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050504 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050519 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050534 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050548 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050567 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050582 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050596 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050612 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050622 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050638 4708 reconstruct.go:130] 
"Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050647 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050657 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050668 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050678 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050687 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050699 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050711 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050727 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050738 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050749 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050760 4708 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050770 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050782 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050812 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050825 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050834 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050844 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050853 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050879 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050891 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050890 4708 manager.go:319] Starting recovery of all containers Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.050901 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Feb 03 07:10:22 crc 
kubenswrapper[4708]: I0203 07:10:22.051844 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052505 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052538 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052553 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052567 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052577 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052588 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052599 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052609 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052619 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052629 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052639 
4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052656 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052668 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052679 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052694 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052710 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052728 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052742 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052755 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052770 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052785 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052818 4708 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052835 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052849 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052863 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052878 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052894 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052907 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052920 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052936 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052951 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052967 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.052995 4708 reconstruct.go:130] "Volume is marked as uncertain and added 
into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053009 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053023 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053039 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053053 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053067 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053089 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053102 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053117 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053131 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053144 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053160 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053176 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053190 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053204 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053220 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053236 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053250 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053263 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053276 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053290 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053305 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053321 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053336 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053351 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053364 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053377 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053391 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053404 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053416 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053428 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053447 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053507 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053521 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053534 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053549 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053563 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053577 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053594 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053609 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053621 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053636 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053650 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053664 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053677 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053689 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053703 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053716 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053735 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053748 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053760 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053775 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053788 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053822 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053837 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053851 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053861 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053871 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053881 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053907 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053923 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053937 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053950 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053961 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053976 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.053987 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.054000 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.054011 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.054021 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.054030 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.054039 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.054048 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.054057 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.054067 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.054077 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.054087 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.054096 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.054105 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.054115 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.054124 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.054135 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.054145 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.054154 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.054164 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.054180 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.054190 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.054201 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.054221 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.054232 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.054242 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.054252 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.054264 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.054274 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.054284 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.054296 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.058640 4708 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059094 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059135 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059153 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059169 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059182 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059196 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059209 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059224 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059239 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059252 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059265 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059277 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059292 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059305 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059317 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059328 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059345 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059361 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059372 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059384 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059398 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059410 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059422 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059433 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059445 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059459 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059479 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059496 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059511 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059524 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059536 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059548 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059562 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059575 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059587 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059598 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059609 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059623 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059635 4708 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059645 4708 reconstruct.go:97] "Volume reconstruction finished"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.059654 4708 reconciler.go:26] "Reconciler: start to sync state"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.082100 4708 manager.go:324] Recovery completed
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.089240 4708 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.091455 4708 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.091578 4708 status_manager.go:217] "Starting to sync pod status with apiserver"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.091629 4708 kubelet.go:2335] "Starting kubelet main sync loop"
Feb 03 07:10:22 crc kubenswrapper[4708]: E0203 07:10:22.091742 4708 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]"
Feb 03 07:10:22 crc kubenswrapper[4708]: W0203 07:10:22.093833 4708 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.53:6443: connect: connection refused
Feb 03 07:10:22 crc kubenswrapper[4708]: E0203 07:10:22.093930 4708 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.53:6443: connect: connection refused" logger="UnhandledError"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.094877 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.096440 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.096488 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.096501 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.097567 4708 cpu_manager.go:225] "Starting CPU manager" policy="none"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.097588 4708 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.097623 4708 state_mem.go:36] "Initialized new in-memory state store"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.118071 4708 policy_none.go:49] "None policy: Start"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.119632 4708 memory_manager.go:170] "Starting memorymanager" policy="None"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.119680 4708 state_mem.go:35] "Initializing new in-memory state store"
Feb 03 07:10:22 crc kubenswrapper[4708]: E0203 07:10:22.139584 4708 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.169520 4708 manager.go:334] "Starting Device Plugin manager"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.170081 4708 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.170225 4708 server.go:79] "Starting device plugin registration server"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.171027 4708 eviction_manager.go:189] "Eviction manager: starting control loop"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.171208 4708 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.171925 4708 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.172601 4708 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.172773 4708 plugin_manager.go:118] "Starting Kubelet Plugin Manager"
Feb 03 07:10:22 crc kubenswrapper[4708]: E0203 07:10:22.181143 4708 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.192381 4708 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"]
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.192589 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.193888 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.193933 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.193967 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.194204 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.194394 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.194432 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.195552 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.195579 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.195588 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.195647 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.195697 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.195712 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.195943 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.196145 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.196221 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.197094 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.197169 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.197202 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.197229 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.197244 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.197251 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.197482 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.198326 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.198395 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.198672 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.198748 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.198762 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.199107 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.199281 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.199318 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.199826 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.199886 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.199900 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.200345 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.200386 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.200402 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.200501 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.200521 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.200532 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.200865 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.201105 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.201823 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.201851 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.201862 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:10:22 crc kubenswrapper[4708]: E0203 07:10:22.240111 4708 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.53:6443: connect: connection refused" interval="400ms"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.262174 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.262213 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.262235 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.262256 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.262273 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.262291 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.262306 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.262322 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.262337 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.262377 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.262410 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.262429 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.262449 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.262477 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.262510 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.272065 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.273507 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.273544 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.273558 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.273583 4708 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: E0203 07:10:22.274280 4708 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.53:6443: connect: connection refused" node="crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.363638 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.363693 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.363751 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.363769 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.363786 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.363830 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.363862 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.363834 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.363888 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.363903 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.363932 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.363917 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.363901 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.363968 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.363984 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.363970 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.364029 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.364029 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.363998 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.364014 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.364090 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.364113 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.364145 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.364173 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.364149 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.364195 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.364207 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Feb 03 07:10:22 crc
kubenswrapper[4708]: I0203 07:10:22.364226 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.364258 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.364178 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.474576 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.476003 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.476054 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.476067 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.476097 4708 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 03 07:10:22 crc kubenswrapper[4708]: E0203 07:10:22.476660 4708 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.53:6443: connect: connection refused" node="crc" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.522339 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.531434 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.551509 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.566864 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.573864 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 03 07:10:22 crc kubenswrapper[4708]: W0203 07:10:22.610827 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-2965352022dff714f654a2894ef0ddaa6de6f1609f86e00bc3e3b61e2830d802 WatchSource:0}: Error finding container 2965352022dff714f654a2894ef0ddaa6de6f1609f86e00bc3e3b61e2830d802: Status 404 returned error can't find the container with id 2965352022dff714f654a2894ef0ddaa6de6f1609f86e00bc3e3b61e2830d802 Feb 03 07:10:22 crc kubenswrapper[4708]: W0203 07:10:22.614330 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-5487e6c4e3d106006bccb3902e9670fcba85060f84e1ddb1fdb557d5e6167d6f WatchSource:0}: Error finding container 5487e6c4e3d106006bccb3902e9670fcba85060f84e1ddb1fdb557d5e6167d6f: Status 404 returned error can't find the container with id 5487e6c4e3d106006bccb3902e9670fcba85060f84e1ddb1fdb557d5e6167d6f Feb 03 07:10:22 crc kubenswrapper[4708]: W0203 07:10:22.619536 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-6529b02655cc1dabbd206875e4bee0ba07bec4a83350527b8e25fa141ce773c7 WatchSource:0}: Error finding container 6529b02655cc1dabbd206875e4bee0ba07bec4a83350527b8e25fa141ce773c7: Status 404 returned error can't find the container with id 6529b02655cc1dabbd206875e4bee0ba07bec4a83350527b8e25fa141ce773c7 Feb 03 07:10:22 crc kubenswrapper[4708]: E0203 07:10:22.641363 4708 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.53:6443: connect: connection refused" interval="800ms" Feb 03 07:10:22 crc kubenswrapper[4708]: W0203 07:10:22.868520 4708 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.53:6443: connect: connection refused Feb 03 07:10:22 crc kubenswrapper[4708]: E0203 07:10:22.869110 4708 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.53:6443: connect: connection refused" logger="UnhandledError" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.877121 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.878553 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.878603 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.878613 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:22 crc kubenswrapper[4708]: I0203 07:10:22.878644 4708 kubelet_node_status.go:76] "Attempting to register node" 
node="crc" Feb 03 07:10:22 crc kubenswrapper[4708]: E0203 07:10:22.879227 4708 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.53:6443: connect: connection refused" node="crc" Feb 03 07:10:23 crc kubenswrapper[4708]: W0203 07:10:23.016598 4708 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.53:6443: connect: connection refused Feb 03 07:10:23 crc kubenswrapper[4708]: E0203 07:10:23.016681 4708 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.53:6443: connect: connection refused" logger="UnhandledError" Feb 03 07:10:23 crc kubenswrapper[4708]: I0203 07:10:23.026469 4708 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.53:6443: connect: connection refused Feb 03 07:10:23 crc kubenswrapper[4708]: I0203 07:10:23.037583 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 05:07:35.14858329 +0000 UTC Feb 03 07:10:23 crc kubenswrapper[4708]: I0203 07:10:23.099635 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"6529b02655cc1dabbd206875e4bee0ba07bec4a83350527b8e25fa141ce773c7"} Feb 03 07:10:23 crc kubenswrapper[4708]: I0203 07:10:23.102548 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"5487e6c4e3d106006bccb3902e9670fcba85060f84e1ddb1fdb557d5e6167d6f"} Feb 03 07:10:23 crc kubenswrapper[4708]: I0203 07:10:23.104022 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"2965352022dff714f654a2894ef0ddaa6de6f1609f86e00bc3e3b61e2830d802"} Feb 03 07:10:23 crc kubenswrapper[4708]: I0203 07:10:23.105123 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"c7daaca7c50aa63df1dbab5e46734bdef81b51eae2c43fba65ee55a2f9a8dc03"} Feb 03 07:10:23 crc kubenswrapper[4708]: I0203 07:10:23.106095 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"0c2286290ed0fc07b5aeee0f7f0996987ca5b2128d85688edadf40b81cffbe7e"} Feb 03 07:10:23 crc kubenswrapper[4708]: E0203 07:10:23.136012 4708 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.53:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.1890aaf4d1553bbe default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-03 07:10:22.01737107 +0000 UTC m=+0.999317877,LastTimestamp:2026-02-03 07:10:22.01737107 +0000 UTC m=+0.999317877,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 03 07:10:23 crc kubenswrapper[4708]: W0203 07:10:23.181710 4708 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.53:6443: connect: connection refused Feb 03 07:10:23 crc kubenswrapper[4708]: E0203 07:10:23.181811 4708 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.53:6443: connect: connection refused" logger="UnhandledError" Feb 03 07:10:23 crc kubenswrapper[4708]: W0203 07:10:23.303445 4708 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.53:6443: connect: connection refused Feb 03 07:10:23 crc kubenswrapper[4708]: E0203 07:10:23.303561 4708 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.53:6443: connect: connection refused" logger="UnhandledError" Feb 03 07:10:23 crc kubenswrapper[4708]: E0203 07:10:23.442115 4708 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.53:6443: connect: connection refused" interval="1.6s" Feb 03 07:10:23 crc kubenswrapper[4708]: I0203 07:10:23.679911 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 03 07:10:23 crc kubenswrapper[4708]: I0203 07:10:23.681504 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:23 crc kubenswrapper[4708]: I0203 07:10:23.681560 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:23 crc kubenswrapper[4708]: I0203 07:10:23.681573 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:23 crc kubenswrapper[4708]: I0203 07:10:23.681601 4708 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 03 07:10:23 crc kubenswrapper[4708]: E0203 07:10:23.682273 4708 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.53:6443: connect: connection refused" node="crc" Feb 03 07:10:23 crc kubenswrapper[4708]: I0203 07:10:23.862120 4708 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Feb 03 07:10:23 crc kubenswrapper[4708]: E0203 
07:10:23.863302 4708 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.53:6443: connect: connection refused" logger="UnhandledError" Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.026491 4708 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.53:6443: connect: connection refused Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.037805 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 00:24:16.203996994 +0000 UTC Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.110551 4708 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="00bbcc3fd4dd42221c01f2c794aaac720d860f9214d6473ddca8ca96d5991442" exitCode=0 Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.110631 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"00bbcc3fd4dd42221c01f2c794aaac720d860f9214d6473ddca8ca96d5991442"} Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.110707 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.112201 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.112252 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.112269 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.114680 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60"} Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.114753 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1"} Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.117138 4708 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0" exitCode=0 Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.117223 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0"} Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.117302 4708 kubelet_node_status.go:401] "Setting node 
annotation to enable volume controller attach/detach" Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.118535 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.118566 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.118574 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.120424 4708 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="00354562c08cbf28cc0aeeea5857f0a3c6720c61dd9b54d1e1033f590cea004c" exitCode=0 Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.120516 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"00354562c08cbf28cc0aeeea5857f0a3c6720c61dd9b54d1e1033f590cea004c"} Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.120549 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.121580 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.121618 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.121630 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.122459 4708 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="04ceb2c410480eae93a9968759183b1db1300b005b3904807c7b6393f7866ae6" exitCode=0 Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.122502 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"04ceb2c410480eae93a9968759183b1db1300b005b3904807c7b6393f7866ae6"} Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.122525 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.124005 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.124075 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.124089 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.124533 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.125289 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.125322 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 03 07:10:24 crc kubenswrapper[4708]: I0203 07:10:24.125332 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.026634 4708 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.53:6443: connect: connection refused Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.038843 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 16:52:08.825452624 +0000 UTC Feb 03 07:10:25 crc kubenswrapper[4708]: E0203 07:10:25.043741 4708 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.53:6443: connect: connection refused" interval="3.2s" Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.130086 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"de545ea0d1c7a9dc2abbabf5608bd9c24d017b16ac640fac8d033d5b2f9e3e13"} Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.130142 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"5a72c95710bf91e6e945e1c87e81ca7bb1c45736af741186878d43e8d44cec3c"} Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.130155 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"e28c7820a6c7a758468366fd037570c94e2e7fa0e1b1383572572e9f5e9c90c7"} Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.130167 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.131457 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.131496 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.131508 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.135210 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674"} Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.135248 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108"} Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.135287 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller 
attach/detach" Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.136623 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.136681 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.136689 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.137585 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34"} Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.137614 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69"} Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.137629 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af"} Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.141194 4708 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="2764c5a5c9edde2a19bde6f6b63e264f88e124ee3ad739a03fcbaa60ad5516d7" exitCode=0 Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.141285 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.141312 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"2764c5a5c9edde2a19bde6f6b63e264f88e124ee3ad739a03fcbaa60ad5516d7"} Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.142103 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.142137 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.142167 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.142999 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"777f2d524739d804f111dad8fa87947eada3bd79860cc39a14fda6cc1d70f3ff"} Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.143126 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.144102 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.144131 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 
07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.144145 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:25 crc kubenswrapper[4708]: W0203 07:10:25.208420 4708 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.53:6443: connect: connection refused Feb 03 07:10:25 crc kubenswrapper[4708]: E0203 07:10:25.208524 4708 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.53:6443: connect: connection refused" logger="UnhandledError" Feb 03 07:10:25 crc kubenswrapper[4708]: W0203 07:10:25.272302 4708 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.53:6443: connect: connection refused Feb 03 07:10:25 crc kubenswrapper[4708]: E0203 07:10:25.272401 4708 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.53:6443: connect: connection refused" logger="UnhandledError" Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.283147 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.284225 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.284280 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.284292 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.284320 4708 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 03 07:10:25 crc kubenswrapper[4708]: E0203 07:10:25.284854 4708 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.53:6443: connect: connection refused" node="crc" Feb 03 07:10:25 crc kubenswrapper[4708]: I0203 07:10:25.535126 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 03 07:10:25 crc kubenswrapper[4708]: W0203 07:10:25.631936 4708 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.53:6443: connect: connection refused Feb 03 07:10:25 crc kubenswrapper[4708]: E0203 07:10:25.632081 4708 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get 
\"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.53:6443: connect: connection refused" logger="UnhandledError" Feb 03 07:10:25 crc kubenswrapper[4708]: W0203 07:10:25.996691 4708 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.53:6443: connect: connection refused Feb 03 07:10:25 crc kubenswrapper[4708]: E0203 07:10:25.996825 4708 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.53:6443: connect: connection refused" logger="UnhandledError" Feb 03 07:10:26 crc kubenswrapper[4708]: I0203 07:10:26.025850 4708 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.53:6443: connect: connection refused Feb 03 07:10:26 crc kubenswrapper[4708]: I0203 07:10:26.039166 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 08:12:03.352389241 +0000 UTC Feb 03 07:10:26 crc kubenswrapper[4708]: I0203 07:10:26.147702 4708 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="4f1e6bdad08093b55f4f4d6bbd90470546dd1f6579765bf2b7bc31f82fe116cf" exitCode=0 Feb 03 07:10:26 crc kubenswrapper[4708]: I0203 07:10:26.147822 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"4f1e6bdad08093b55f4f4d6bbd90470546dd1f6579765bf2b7bc31f82fe116cf"} Feb 03 07:10:26 crc kubenswrapper[4708]: I0203 07:10:26.147975 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 03 07:10:26 crc kubenswrapper[4708]: I0203 07:10:26.149131 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:26 crc kubenswrapper[4708]: I0203 07:10:26.149163 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:26 crc kubenswrapper[4708]: I0203 07:10:26.149175 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:26 crc kubenswrapper[4708]: I0203 07:10:26.153378 4708 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 03 07:10:26 crc kubenswrapper[4708]: I0203 07:10:26.153424 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 03 07:10:26 crc kubenswrapper[4708]: I0203 07:10:26.153946 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 03 07:10:26 crc kubenswrapper[4708]: I0203 07:10:26.154266 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"4f2691b03a6c296f6d88d5d94a675c09993fa63e3a19d8adebb9115b0c245257"} Feb 03 07:10:26 crc kubenswrapper[4708]: I0203 07:10:26.154305 4708 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc"} Feb 03 07:10:26 crc kubenswrapper[4708]: I0203 07:10:26.154355 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 03 07:10:26 crc kubenswrapper[4708]: I0203 07:10:26.154722 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 03 07:10:26 crc kubenswrapper[4708]: I0203 07:10:26.155487 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:26 crc kubenswrapper[4708]: I0203 07:10:26.155514 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:26 crc kubenswrapper[4708]: I0203 07:10:26.155524 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:26 crc kubenswrapper[4708]: I0203 07:10:26.156075 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:26 crc kubenswrapper[4708]: I0203 07:10:26.156102 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:26 crc kubenswrapper[4708]: I0203 07:10:26.156110 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:26 crc kubenswrapper[4708]: I0203 07:10:26.156520 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:26 crc kubenswrapper[4708]: I0203 07:10:26.156546 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:26 crc kubenswrapper[4708]: I0203 07:10:26.156555 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:26 crc kubenswrapper[4708]: I0203 07:10:26.156927 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:26 crc kubenswrapper[4708]: I0203 07:10:26.156950 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:26 crc kubenswrapper[4708]: I0203 07:10:26.156960 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:26 crc kubenswrapper[4708]: I0203 07:10:26.829533 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 07:10:27 crc kubenswrapper[4708]: I0203 07:10:27.025707 4708 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.53:6443: connect: connection refused Feb 03 07:10:27 crc kubenswrapper[4708]: I0203 07:10:27.039896 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 16:07:49.476425032 +0000 UTC Feb 03 07:10:27 crc kubenswrapper[4708]: I0203 07:10:27.156925 4708 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Feb 03 07:10:27 crc kubenswrapper[4708]: I0203 07:10:27.158506 4708 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="4f2691b03a6c296f6d88d5d94a675c09993fa63e3a19d8adebb9115b0c245257" exitCode=255 Feb 03 07:10:27 crc kubenswrapper[4708]: I0203 07:10:27.158567 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"4f2691b03a6c296f6d88d5d94a675c09993fa63e3a19d8adebb9115b0c245257"} Feb 03 07:10:27 crc kubenswrapper[4708]: I0203 07:10:27.158601 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 03 07:10:27 crc kubenswrapper[4708]: I0203 07:10:27.159425 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:27 crc kubenswrapper[4708]: I0203 07:10:27.159456 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:27 crc kubenswrapper[4708]: I0203 07:10:27.159467 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:27 crc kubenswrapper[4708]: I0203 07:10:27.159983 4708 scope.go:117] "RemoveContainer" containerID="4f2691b03a6c296f6d88d5d94a675c09993fa63e3a19d8adebb9115b0c245257" Feb 03 07:10:27 crc kubenswrapper[4708]: I0203 07:10:27.161636 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"c81f8040a3903c93c6a8e700733e6d306d4d057145562bca18df93f84e05c130"} Feb 03 07:10:27 crc kubenswrapper[4708]: I0203 07:10:27.161665 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"ed87f447ce590518e6c11016afed10c275ec305ce2f1569f049fa9e72e314389"} Feb 03 07:10:27 crc kubenswrapper[4708]: I0203 07:10:27.161677 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"1b91ee37b2b4a9d611dce015828570fb8b69da2b4cdbfb14452f58304390de9a"} Feb 03 07:10:27 crc kubenswrapper[4708]: I0203 07:10:27.161682 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 03 07:10:27 crc kubenswrapper[4708]: I0203 07:10:27.162251 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:27 crc kubenswrapper[4708]: I0203 07:10:27.162271 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:27 crc kubenswrapper[4708]: I0203 07:10:27.162279 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:27 crc kubenswrapper[4708]: I0203 07:10:27.513175 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 07:10:27 crc kubenswrapper[4708]: I0203 07:10:27.988715 4708 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 07:10:28 crc kubenswrapper[4708]: 
I0203 07:10:28.040759 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 11:35:14.392563133 +0000 UTC Feb 03 07:10:28 crc kubenswrapper[4708]: I0203 07:10:28.166976 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Feb 03 07:10:28 crc kubenswrapper[4708]: I0203 07:10:28.170866 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802"} Feb 03 07:10:28 crc kubenswrapper[4708]: I0203 07:10:28.171052 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 03 07:10:28 crc kubenswrapper[4708]: I0203 07:10:28.171363 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 07:10:28 crc kubenswrapper[4708]: I0203 07:10:28.173527 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:28 crc kubenswrapper[4708]: I0203 07:10:28.173870 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:28 crc kubenswrapper[4708]: I0203 07:10:28.174010 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:28 crc kubenswrapper[4708]: I0203 07:10:28.174546 4708 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Feb 03 07:10:28 crc kubenswrapper[4708]: I0203 07:10:28.178935 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a5281fb425defd1d58d168728e9d0c9c3e284733bf99c8657b71c3d70ad7faad"} Feb 03 07:10:28 crc kubenswrapper[4708]: I0203 07:10:28.178998 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"53af1b2767b38d2af1ddc24c344dda295c79bdf418f155137756679ba689cd47"} Feb 03 07:10:28 crc kubenswrapper[4708]: I0203 07:10:28.179039 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 03 07:10:28 crc kubenswrapper[4708]: I0203 07:10:28.180104 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:28 crc kubenswrapper[4708]: I0203 07:10:28.180216 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:28 crc kubenswrapper[4708]: I0203 07:10:28.180307 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:28 crc kubenswrapper[4708]: I0203 07:10:28.485898 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 03 07:10:28 crc kubenswrapper[4708]: I0203 07:10:28.487876 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:28 crc kubenswrapper[4708]: I0203 07:10:28.488093 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure"
Feb 03 07:10:28 crc kubenswrapper[4708]: I0203 07:10:28.488299 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:10:28 crc kubenswrapper[4708]: I0203 07:10:28.488491 4708 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Feb 03 07:10:29 crc kubenswrapper[4708]: I0203 07:10:29.041732 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 11:39:27.110046384 +0000 UTC
Feb 03 07:10:29 crc kubenswrapper[4708]: I0203 07:10:29.181786 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 03 07:10:29 crc kubenswrapper[4708]: I0203 07:10:29.182344 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 03 07:10:29 crc kubenswrapper[4708]: I0203 07:10:29.182392 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 03 07:10:29 crc kubenswrapper[4708]: I0203 07:10:29.182676 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:10:29 crc kubenswrapper[4708]: I0203 07:10:29.182703 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:10:29 crc kubenswrapper[4708]: I0203 07:10:29.182711 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:10:29 crc kubenswrapper[4708]: I0203 07:10:29.183346 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:10:29 crc kubenswrapper[4708]: I0203 07:10:29.183378 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:10:29 crc kubenswrapper[4708]: I0203 07:10:29.183391 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:10:30 crc kubenswrapper[4708]: I0203 07:10:30.043638 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 19:52:53.130609032 +0000 UTC
Feb 03 07:10:30 crc kubenswrapper[4708]: I0203 07:10:30.056961 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Feb 03 07:10:30 crc kubenswrapper[4708]: I0203 07:10:30.057334 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 03 07:10:30 crc kubenswrapper[4708]: I0203 07:10:30.059054 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:10:30 crc kubenswrapper[4708]: I0203 07:10:30.059115 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:10:30 crc kubenswrapper[4708]: I0203 07:10:30.059137 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:10:30 crc kubenswrapper[4708]: I0203 07:10:30.107225 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc"
Feb 03 07:10:30 crc kubenswrapper[4708]: I0203 07:10:30.184978 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 03 07:10:30 crc kubenswrapper[4708]: I0203 07:10:30.184989 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 03 07:10:30 crc kubenswrapper[4708]: I0203 07:10:30.186491 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:10:30 crc kubenswrapper[4708]: I0203 07:10:30.186542 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:10:30 crc kubenswrapper[4708]: I0203 07:10:30.186556 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:10:30 crc kubenswrapper[4708]: I0203 07:10:30.187074 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:10:30 crc kubenswrapper[4708]: I0203 07:10:30.187101 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:10:30 crc kubenswrapper[4708]: I0203 07:10:30.187110 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:10:30 crc kubenswrapper[4708]: I0203 07:10:30.371644 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Feb 03 07:10:30 crc kubenswrapper[4708]: I0203 07:10:30.371970 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 03 07:10:30 crc kubenswrapper[4708]: I0203 07:10:30.373705 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:10:30 crc kubenswrapper[4708]: I0203 07:10:30.373860 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:10:30 crc kubenswrapper[4708]: I0203 07:10:30.373877 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:10:30 crc kubenswrapper[4708]: I0203 07:10:30.379687 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Feb 03 07:10:31 crc kubenswrapper[4708]: I0203 07:10:31.044716 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 16:42:04.176864375 +0000 UTC
Feb 03 07:10:31 crc kubenswrapper[4708]: I0203 07:10:31.187575 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 03 07:10:31 crc kubenswrapper[4708]: I0203 07:10:31.187720 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Feb 03 07:10:31 crc kubenswrapper[4708]: I0203 07:10:31.189241 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:10:31 crc kubenswrapper[4708]: I0203 07:10:31.189381 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:10:31 crc kubenswrapper[4708]: I0203 07:10:31.189472 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:10:31 crc kubenswrapper[4708]: I0203 07:10:31.625886 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Feb 03 07:10:32 crc kubenswrapper[4708]: I0203 07:10:32.045541 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 12:25:06.325338863 +0000 UTC
Feb 03 07:10:32 crc kubenswrapper[4708]: I0203 07:10:32.092024 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc"
Feb 03 07:10:32 crc kubenswrapper[4708]: I0203 07:10:32.092223 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 03 07:10:32 crc kubenswrapper[4708]: I0203 07:10:32.093483 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:10:32 crc kubenswrapper[4708]: I0203 07:10:32.093570 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:10:32 crc kubenswrapper[4708]: I0203 07:10:32.093586 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:10:32 crc kubenswrapper[4708]: E0203 07:10:32.181258 4708 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Feb 03 07:10:32 crc kubenswrapper[4708]: I0203 07:10:32.189390 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 03 07:10:32 crc kubenswrapper[4708]: I0203 07:10:32.190613 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:10:32 crc kubenswrapper[4708]: I0203 07:10:32.190664 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:10:32 crc kubenswrapper[4708]: I0203 07:10:32.190678 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:10:33 crc kubenswrapper[4708]: I0203 07:10:33.046044 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 07:09:19.766323333 +0000 UTC
Feb 03 07:10:33 crc kubenswrapper[4708]: I0203 07:10:33.192721 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 03 07:10:33 crc kubenswrapper[4708]: I0203 07:10:33.195154 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:10:33 crc kubenswrapper[4708]: I0203 07:10:33.195202 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:10:33 crc kubenswrapper[4708]: I0203 07:10:33.195254 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:10:33 crc kubenswrapper[4708]: I0203 07:10:33.200614 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Feb 03 07:10:34 crc kubenswrapper[4708]: I0203 07:10:34.047260 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 15:22:17.973299266 +0000 UTC
Feb 03 07:10:34 crc kubenswrapper[4708]: I0203 07:10:34.194976 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 03 07:10:34 crc kubenswrapper[4708]: I0203 07:10:34.196310 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:10:34 crc kubenswrapper[4708]: I0203 07:10:34.196372 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:10:34 crc kubenswrapper[4708]: I0203 07:10:34.196393 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:10:34 crc kubenswrapper[4708]: I0203 07:10:34.626213 4708 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body=
Feb 03 07:10:34 crc kubenswrapper[4708]: I0203 07:10:34.626340 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Feb 03 07:10:35 crc kubenswrapper[4708]: I0203 07:10:35.047405 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 15:34:10.852304511 +0000 UTC
Feb 03 07:10:36 crc kubenswrapper[4708]: I0203 07:10:36.049447 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 09:50:43.57564204 +0000 UTC
Feb 03 07:10:37 crc kubenswrapper[4708]: I0203 07:10:37.049782 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-03 07:20:58.074502648 +0000 UTC
Feb 03 07:10:37 crc kubenswrapper[4708]: I0203 07:10:37.513645 4708 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="Get \"https://192.168.126.11:6443/livez\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Feb 03 07:10:37 crc kubenswrapper[4708]: I0203 07:10:37.513756 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="Get \"https://192.168.126.11:6443/livez\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Feb 03 07:10:38 crc kubenswrapper[4708]: I0203 07:10:38.026639 4708 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout
Feb 03 07:10:38 crc kubenswrapper[4708]: I0203 07:10:38.043368 4708 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Feb 03 07:10:38 crc kubenswrapper[4708]: I0203 07:10:38.043434 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Feb 03 07:10:38 crc kubenswrapper[4708]: I0203 07:10:38.050360 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 16:37:48.244653128 +0000 UTC
Feb 03 07:10:39 crc kubenswrapper[4708]: I0203 07:10:39.052134 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 15:04:49.864376457 +0000 UTC
Feb 03 07:10:40 crc kubenswrapper[4708]: I0203 07:10:40.052562 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 14:45:43.198300441 +0000 UTC
Feb 03 07:10:40 crc kubenswrapper[4708]: I0203 07:10:40.137015 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc"
Feb 03 07:10:40 crc kubenswrapper[4708]: I0203 07:10:40.137499 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 03 07:10:40 crc kubenswrapper[4708]: I0203 07:10:40.138755 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:10:40 crc kubenswrapper[4708]: I0203 07:10:40.138887 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:10:40 crc kubenswrapper[4708]: I0203 07:10:40.138970 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:10:40 crc kubenswrapper[4708]: I0203 07:10:40.150720 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc"
Feb 03 07:10:40 crc kubenswrapper[4708]: I0203 07:10:40.213549 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 03 07:10:40 crc kubenswrapper[4708]: I0203 07:10:40.215249 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:10:40 crc kubenswrapper[4708]: I0203 07:10:40.215303 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:10:40 crc kubenswrapper[4708]: I0203 07:10:40.215321 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:10:41 crc kubenswrapper[4708]: I0203 07:10:41.054101 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 21:46:20.634153039 +0000 UTC
Feb 03 07:10:42 crc kubenswrapper[4708]: I0203 07:10:42.055170 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 04:46:02.39030584 +0000 UTC
Feb 03 07:10:42 crc kubenswrapper[4708]: E0203 07:10:42.181400 4708 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Feb 03 07:10:42 crc kubenswrapper[4708]: I0203 07:10:42.519197 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 03 07:10:42 crc kubenswrapper[4708]: I0203 07:10:42.519405 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 03 07:10:42 crc kubenswrapper[4708]: I0203 07:10:42.520551 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:10:42 crc kubenswrapper[4708]: I0203 07:10:42.520619 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:10:42 crc kubenswrapper[4708]: I0203 07:10:42.520633 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:10:42 crc kubenswrapper[4708]: I0203 07:10:42.523940 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 03 07:10:43 crc kubenswrapper[4708]: E0203 07:10:43.033430 4708 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s"
Feb 03 07:10:43 crc kubenswrapper[4708]: I0203 07:10:43.035790 4708 trace.go:236] Trace[172040812]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Feb-2026 07:10:31.116) (total time: 11919ms):
Feb 03 07:10:43 crc kubenswrapper[4708]: Trace[172040812]: ---"Objects listed" error: 11919ms (07:10:43.035)
Feb 03 07:10:43 crc kubenswrapper[4708]: Trace[172040812]: [11.919429449s] [11.919429449s] END
Feb 03 07:10:43 crc kubenswrapper[4708]: I0203 07:10:43.035909 4708 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Feb 03 07:10:43 crc kubenswrapper[4708]: I0203 07:10:43.036089 4708 trace.go:236] Trace[1804790711]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Feb-2026 07:10:29.593) (total time: 13442ms):
Feb 03 07:10:43 crc kubenswrapper[4708]: Trace[1804790711]: ---"Objects listed" error: 13442ms (07:10:43.035)
Feb 03 07:10:43 crc kubenswrapper[4708]: Trace[1804790711]: [13.442284406s] [13.442284406s] END
Feb 03 07:10:43 crc kubenswrapper[4708]: I0203 07:10:43.036132 4708 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Feb 03 07:10:43 crc kubenswrapper[4708]: I0203 07:10:43.038017 4708 trace.go:236] Trace[1476681715]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Feb-2026 07:10:29.971) (total time: 13066ms):
Feb 03 07:10:43 crc kubenswrapper[4708]: Trace[1476681715]: ---"Objects listed" error: 13066ms (07:10:43.037)
Feb 03 07:10:43 crc kubenswrapper[4708]: Trace[1476681715]: [13.066356629s] [13.066356629s] END
Feb 03 07:10:43 crc kubenswrapper[4708]: I0203 07:10:43.038051 4708 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Feb 03 07:10:43 crc kubenswrapper[4708]: I0203 07:10:43.038508 4708 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Feb 03 07:10:43 crc kubenswrapper[4708]: I0203 07:10:43.039937 4708 trace.go:236] Trace[766722611]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Feb-2026 07:10:30.094) (total time: 12945ms):
Feb 03 07:10:43 crc kubenswrapper[4708]: Trace[766722611]: ---"Objects listed" error: 12945ms (07:10:43.039)
Feb 03 07:10:43 crc kubenswrapper[4708]: Trace[766722611]: [12.945717504s] [12.945717504s] END
Feb 03 07:10:43 crc kubenswrapper[4708]: I0203 07:10:43.039978 4708 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Feb 03 07:10:43 crc kubenswrapper[4708]: E0203 07:10:43.042547 4708 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Feb 03 07:10:43 crc kubenswrapper[4708]: I0203 07:10:43.049725 4708 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146
Feb 03 07:10:43 crc kubenswrapper[4708]: I0203 07:10:43.055650 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 03:16:05.628026319 +0000 UTC
Feb 03 07:10:43 crc kubenswrapper[4708]: I0203 07:10:43.069760 4708 csr.go:261] certificate signing request csr-vzb44 is approved, waiting to be issued
Feb 03 07:10:43 crc kubenswrapper[4708]: I0203 07:10:43.078506 4708 csr.go:257] certificate signing request csr-vzb44 is issued
Feb 03 07:10:43 crc kubenswrapper[4708]: I0203 07:10:43.211641 4708 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:39996->192.168.126.11:17697: read: connection reset by peer" start-of-body=
Feb 03 07:10:43 crc kubenswrapper[4708]: I0203 07:10:43.211719 4708 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:56312->192.168.126.11:17697: read: connection reset by peer" start-of-body=
Feb 03 07:10:43 crc kubenswrapper[4708]: I0203 07:10:43.211724 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:39996->192.168.126.11:17697: read: connection reset by peer"
Feb 03 07:10:43 crc kubenswrapper[4708]: I0203 07:10:43.211806 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:56312->192.168.126.11:17697: read: connection reset by peer"
Feb 03 07:10:43 crc kubenswrapper[4708]: I0203 07:10:43.221680 4708 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Feb 03 07:10:43 crc kubenswrapper[4708]: I0203 07:10:43.221742 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Feb 03 07:10:43 crc kubenswrapper[4708]: I0203 07:10:43.927281 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Feb 03 07:10:43 crc kubenswrapper[4708]: I0203 07:10:43.934066 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.014364 4708 apiserver.go:52] "Watching apiserver"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.018104 4708 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.022201 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc"]
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.022734 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.022748 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.022851 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.022922 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.023165 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.023555 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 03 07:10:44 crc kubenswrapper[4708]: E0203 07:10:44.023639 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 03 07:10:44 crc kubenswrapper[4708]: E0203 07:10:44.023549 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 03 07:10:44 crc kubenswrapper[4708]: E0203 07:10:44.023910 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.025386 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.025507 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.025507 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.026171 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.027004 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.027243 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.027860 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.030311 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.030446 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.039969 4708 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.046462 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.046520 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.046560 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.046593 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.046623 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.046647 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.046672 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.046714 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.046743 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.046771 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.046852 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.046876 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.046901 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.047069 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.047098 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.047127 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.047157 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.047189 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.047218 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.047245 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.047279 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.047307 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.047334 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.047365 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.047392 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.047421 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.047452 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.047479 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.047505 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.047531 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.047554 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.047579 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.047619 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.047657 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.047695 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.047672 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.047722 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.047767 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.047691 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.047849 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.047957 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.047988 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.048013 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.048038 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.048286 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.048324 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.048347 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.048369 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.048395 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.048424 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.048446 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.048467 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.048487 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.048512 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.048537 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.048559 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.048580 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.048590 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.048603 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.048626 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.048649 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.048670 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.048654 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.048670 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.048692 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.048945 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.048994 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049036 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049067 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049091 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049114 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049141 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049169 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049196 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049229 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049257 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049286 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049315 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049351 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049384 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049420 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049453 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049481 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049519 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049547 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049573 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049598 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049622 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049648 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049673 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049700 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049728 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049751 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049776 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049808 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049849 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049879 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049920 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049948 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049975 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050002 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050026 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050048 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050072 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050096 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050119 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050149 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050183 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050207 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050232 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050255 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050280 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050303 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050354 4708
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050383 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050415 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050447 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050475 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050501 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050525 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050549 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050576 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050600 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050627 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050657 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050681 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050710 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.051326 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.051994 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052038 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052065 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052092 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052117 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052141 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052168 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052196 4708 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052224 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052251 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052276 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052300 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052327 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052353 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052375 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052399 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052427 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 
07:10:44.052451 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052483 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052514 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052543 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052574 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052600 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052627 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052655 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052680 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052705 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " 
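
Note: every reconciler_common.go:159 record above and below follows one klog shape: a quoted message carrying the volume name, its UniqueName (the plugin path), the pod name, and the pod UID, with the inner quotes escaped as \". A minimal Go sketch for tallying these unmount records per pod UID follows; it is illustrative only (logscan.go and all identifiers are hypothetical, not kubelet code), and it uses FindAllStringSubmatch so it works both on one-record-per-line logs and on the concatenated records seen in this dump.

// logscan.go - hypothetical helper, not part of kubelet.
// Reads kubelet log text on stdin and tallies, per pod UID, the volumes for
// which "operationExecutor.UnmountVolume started" was logged.
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

// Matches the escaped-quote form shown in this log, e.g.
//   operationExecutor.UnmountVolume started for volume \"serving-cert\" ... (UID: \"1bf7eb37-...\")
var unmountRe = regexp.MustCompile(
	`operationExecutor\.UnmountVolume started for volume \\"([^"\\]+)\\".*?\(UID: \\"([0-9a-f-]+)\\"\)`)

func main() {
	perPod := map[string][]string{} // pod UID -> volume names being unmounted
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // records in this dump run long
	for sc.Scan() {
		// FindAll copes with several records sharing one physical line.
		for _, m := range unmountRe.FindAllStringSubmatch(sc.Text(), -1) {
			perPod[m[2]] = append(perPod[m[2]], m[1])
		}
	}
	if err := sc.Err(); err != nil {
		fmt.Fprintln(os.Stderr, "read:", err)
		os.Exit(1)
	}
	for uid, vols := range perPod {
		fmt.Printf("%s: %d volume(s): %v\n", uid, len(vols), vols)
	}
}

Under those assumptions it can be fed the decompressed log directly, e.g. gunzip -c kubelet.log.gz | go run logscan.go.
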
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052731 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052803 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052875 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052910 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052940 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052973 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053001 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053031 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053057 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053090 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: 
\"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053119 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053148 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053170 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053198 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053227 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053256 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053281 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053305 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053337 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053361 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod 
\"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053447 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053478 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053505 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053532 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053558 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053587 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053619 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053647 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053675 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053703 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: 
\"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053732 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053759 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053785 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053840 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053889 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053920 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053952 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053980 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054011 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054042 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" 
(UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054073 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054103 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054132 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054159 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054190 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054217 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054244 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054314 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054354 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054384 4708 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054414 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054476 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054506 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054532 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054570 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054595 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054621 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054652 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: 
\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054679 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054703 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054725 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054856 4708 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054879 4708 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054899 4708 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054916 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054933 4708 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054948 4708 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049136 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049265 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049302 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049485 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049688 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.049938 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050115 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.057810 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050382 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050413 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050591 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.050923 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.051011 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.051214 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.051227 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.051470 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.051538 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.051529 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.052637 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053205 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053349 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053561 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.053912 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054035 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). 
InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054332 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054377 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.054492 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: E0203 07:10:44.055035 4708 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.055184 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.055433 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.055693 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.055808 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.055865 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.055861 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.055890 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.055892 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.055929 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 01:43:57.549507489 +0000 UTC Feb 03 07:10:44 crc kubenswrapper[4708]: E0203 07:10:44.056028 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:10:44.555994694 +0000 UTC m=+23.537941691 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.056609 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.056630 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.056929 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.057059 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.057140 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.057219 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.057388 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.057412 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.057690 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.057687 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.057963 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.058916 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.058229 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.058325 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.058345 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.058546 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.058683 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.059078 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.059259 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.059396 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.059460 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.059975 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.060237 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). 
InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.060345 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.060375 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.060684 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.061057 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.061444 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: E0203 07:10:44.061736 4708 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.061786 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.061805 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.062181 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.062420 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.062474 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.062502 4708 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.063242 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.064371 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.064848 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.065779 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.066319 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.066607 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.066930 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.067455 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.067417 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.067924 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.068140 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.068183 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.068197 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.068325 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.068678 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.070180 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.070252 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.070439 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.070875 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.070889 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.071038 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.071658 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.071669 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.071805 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.072375 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.072783 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: E0203 07:10:44.073259 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-03 07:10:44.573187842 +0000 UTC m=+23.555134649 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 03 07:10:44 crc kubenswrapper[4708]: E0203 07:10:44.073776 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-03 07:10:44.573758507 +0000 UTC m=+23.555705314 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.073977 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.074052 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.074333 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.074560 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.074621 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.074839 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.074922 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.075174 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.075394 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.076034 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: E0203 07:10:44.076369 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 03 07:10:44 crc kubenswrapper[4708]: E0203 07:10:44.077389 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 03 07:10:44 crc kubenswrapper[4708]: E0203 07:10:44.077434 4708 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 07:10:44 crc kubenswrapper[4708]: E0203 07:10:44.077566 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-03 07:10:44.577534603 +0000 UTC m=+23.559481600 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 07:10:44 crc kubenswrapper[4708]: E0203 07:10:44.077676 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 03 07:10:44 crc kubenswrapper[4708]: E0203 07:10:44.077688 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 03 07:10:44 crc kubenswrapper[4708]: E0203 07:10:44.077699 4708 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 07:10:44 crc kubenswrapper[4708]: E0203 07:10:44.077727 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-03 07:10:44.577720388 +0000 UTC m=+23.559667195 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.078392 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.078766 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.079005 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.079074 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.079452 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.079610 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.080955 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). 
InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.081491 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.081763 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.083585 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.083827 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.084577 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.084761 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.084854 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.085058 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.085071 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.085171 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.085357 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.085600 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.085960 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.086764 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.086946 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.087105 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.087244 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.087361 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.087426 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.087441 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.087700 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.087768 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.088078 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.088098 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.088370 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.088505 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.088966 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.089288 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.089693 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.089745 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.090073 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.090685 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.090783 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.091425 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.091750 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.091895 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.092167 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.092244 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.092348 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.092656 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.092784 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.093307 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.093633 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.093671 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.094173 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.094291 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.094599 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.094768 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.095231 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.096549 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.096673 4708 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-02-03 07:05:43 +0000 UTC, rotation deadline is 2026-11-24 00:10:39.812005915 +0000 UTC Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.096705 4708 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7048h59m55.715304149s for next certificate rotation Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.096784 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.097663 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.098250 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.099948 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.101735 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.102384 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.102429 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.103104 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.103261 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.103569 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.103638 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.104097 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". 
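[editor's note] The status_manager.go:875 records are the noisiest lines in this window, but each one carries only two facts worth extracting: which pod's status patch was rejected, and why. A minimal Python sketch that skips over the embedded patch JSON and pulls out just those two fields (file name and regex are assumptions):

```python
import re
from collections import Counter

# Shape taken from the status_manager.go:875 records above; the err= payload
# embeds the whole rejected JSON patch, but only pod and root cause matter here.
REC = re.compile(
    r'"Failed to update status for pod" pod="(?P<pod>[^"]+)"'
    r'.*failed to call webhook: Post \\"(?P<url>[^\\]+)\\": (?P<cause>[^"]+)"'
)

endpoints = Counter()
with open("kubelet.log") as f:              # local copy of this log; path is an assumption
    for line in f:
        m = REC.search(line)
        if m:
            print(f"{m.group('pod')}: {m.group('cause')}")
            endpoints[m.group("url")] += 1

# In this window every failure points at the same endpoint:
# https://127.0.0.1:9743/pod?timeout=10s
print(endpoints)
```

Collapsing the failures by endpoint makes it obvious that four different pods (iptables-alerter, networking-console-plugin, kube-apiserver, network-check-source, network-operator) are blocked on one unavailable webhook, not on anything pod-specific.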
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.104325 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.104893 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.105471 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.105647 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.106002 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.106165 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.106172 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.106218 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.106596 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.107071 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.107478 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.107701 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.108527 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.108708 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.110008 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.110147 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.113038 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.113169 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.113251 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.115497 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.116029 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.116220 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.116248 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\
":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f2691b03a6c296f6d88d5d94a675c09993fa63e3a19d8adebb9115b0c245257\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:26Z\\\",\\\"message\\\":\\\"W0203 07:10:26.155247 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 07:10:26.157445 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770102626 cert, and key in /tmp/serving-cert-3074372781/serving-signer.crt, /tmp/serving-cert-3074372781/serving-signer.key\\\\nI0203 07:10:26.527815 1 observer_polling.go:159] Starting file observer\\\\nW0203 07:10:26.530572 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 07:10:26.530764 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 07:10:26.531581 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3074372781/tls.crt::/tmp/serving-cert-3074372781/tls.key\\\\\\\"\\\\nF0203 07:10:26.771413 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.117088 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.117165 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.118632 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.124839 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.124950 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.126491 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.130777 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.132430 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.132750 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.134619 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.136052 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.136175 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.137653 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.138180 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.139598 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.141950 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.142944 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.143962 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.144637 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.145739 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.146050 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.147076 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.148587 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.149599 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.150456 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.152150 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.153115 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.154623 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.155024 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
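[editor's note] The kubelet_volumes.go:163 records above document the final step of pod deletion: once every volume is torn down, the kubelet removes /var/lib/kubelet/pods/<podUID>/volumes. A sketch that inventories what is still left under that directory on the node, assuming the standard kubelet root seen in the paths above (run as root on the node; everything else is an assumption):

```python
import os

# Layout taken from the kubelet_volumes.go:163 records above: each pod gets
# /var/lib/kubelet/pods/<podUID>/volumes, removed once all volumes are torn down.
PODS_DIR = "/var/lib/kubelet/pods"

for uid in sorted(os.listdir(PODS_DIR)):
    vol_dir = os.path.join(PODS_DIR, uid, "volumes")
    if os.path.isdir(vol_dir):
        # Plugin dirs such as kubernetes.io~configmap, kubernetes.io~secret, ...
        plugins = os.listdir(vol_dir)
        print(f"{uid}: {len(plugins)} plugin dir(s) remaining: {plugins}")
```

Pods that appear in a "Cleaned up orphaned pod volumes dir" record should no longer show up in this listing; any UID that lingers still has volumes the kubelet could not unmount.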
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.155164 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.155472 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.155579 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.155745 4708 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.155767 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.155781 4708 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.155798 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.155810 4708 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.155843 4708 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.155856 4708 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.155869 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.155882 4708 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.155895 4708 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.155907 4708 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.155920 4708 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.155937 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.155950 4708 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.155964 4708 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.155978 4708 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.155990 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.156003 4708 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.156015 4708 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.156031 4708 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.156046 4708 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.156059 4708 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 
07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.156072 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.156084 4708 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.156096 4708 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.156109 4708 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.156988 4708 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157138 4708 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157168 4708 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157226 4708 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157242 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157258 4708 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157281 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157302 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157319 4708 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc 
kubenswrapper[4708]: I0203 07:10:44.157336 4708 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157360 4708 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157381 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157394 4708 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157410 4708 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157431 4708 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157452 4708 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157466 4708 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157478 4708 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157502 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157514 4708 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157527 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157540 4708 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" 
DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157561 4708 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157575 4708 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157588 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157619 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157669 4708 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157683 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157695 4708 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157714 4708 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157727 4708 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157742 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157755 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157774 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157786 4708 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on 
node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157802 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.157839 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.158938 4708 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.158955 4708 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.158970 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.158989 4708 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.159002 4708 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.159014 4708 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.159029 4708 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.159046 4708 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.159059 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.159071 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.159084 4708 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node 
\"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.159134 4708 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.159147 4708 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.159160 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.159175 4708 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.159189 4708 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.159201 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.159213 4708 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.159230 4708 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.158272 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.159292 4708 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.159332 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.159353 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.159381 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" 
(UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.159820 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.155920 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.160044 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.161996 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.162751 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.164044 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.160020 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.164400 4708 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.164426 4708 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.164672 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.164675 4708 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.164964 4708 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.165047 4708 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.165111 4708 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.165264 4708 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.165327 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.165387 4708 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.165447 4708 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.165511 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.165569 4708 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.165630 4708 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.165696 4708 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.165963 4708 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.166883 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.167009 4708 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.166071 4708 kubelet_volumes.go:152] "Cleaned up orphaned volume 
subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.167749 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.168289 4708 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169163 4708 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169216 4708 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169227 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169239 4708 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169249 4708 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169259 4708 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169268 4708 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169277 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169290 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169302 4708 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169314 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: 
\"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169326 4708 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169338 4708 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169349 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169360 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169371 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169382 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169392 4708 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169403 4708 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169415 4708 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169427 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169439 4708 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169452 4708 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169465 4708 reconciler_common.go:293] "Volume detached 
for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169476 4708 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169486 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169501 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169513 4708 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169526 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169538 4708 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169550 4708 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169562 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169575 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169587 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169598 4708 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169608 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169617 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169626 4708 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169636 4708 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169645 4708 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169654 4708 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169668 4708 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169679 4708 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169689 4708 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169698 4708 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169708 4708 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169718 4708 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169727 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169737 4708 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169746 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169756 4708 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169765 4708 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169774 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169785 4708 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169800 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169809 4708 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169819 4708 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169851 4708 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169863 4708 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169874 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169885 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169897 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.169906 4708 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc 
kubenswrapper[4708]: I0203 07:10:44.169917 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.170519 4708 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.170534 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.170546 4708 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.170558 4708 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.170571 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.170582 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.170594 4708 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.170605 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.170616 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.170629 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.170641 4708 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.170653 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 
crc kubenswrapper[4708]: I0203 07:10:44.170665 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.170679 4708 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.170692 4708 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.170704 4708 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.170717 4708 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.170729 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.170743 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.170855 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.171992 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.172799 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.173280 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.175894 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.177642 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.178496 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.179635 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.180338 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.181250 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.181986 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.183117 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" 
path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.183700 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.184582 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.185152 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.186047 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.187003 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.187890 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.188360 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.189329 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.189880 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.190512 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.191467 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.226300 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.226794 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.228357 4708 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802" 
exitCode=255 Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.228471 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802"} Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.228570 4708 scope.go:117] "RemoveContainer" containerID="4f2691b03a6c296f6d88d5d94a675c09993fa63e3a19d8adebb9115b0c245257" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.229582 4708 scope.go:117] "RemoveContainer" containerID="79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802" Feb 03 07:10:44 crc kubenswrapper[4708]: E0203 07:10:44.230031 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Feb 03 07:10:44 crc kubenswrapper[4708]: E0203 07:10:44.235579 4708 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.240250 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.250643 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.261831 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f2691b03a6c296f6d88d5d94a675c09993fa63e3a19d8adebb9115b0c245257\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:26Z\\\",\\\"message\\\":\\\"W0203 07:10:26.155247 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 
07:10:26.157445 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770102626 cert, and key in /tmp/serving-cert-3074372781/serving-signer.crt, /tmp/serving-cert-3074372781/serving-signer.key\\\\nI0203 07:10:26.527815 1 observer_polling.go:159] Starting file observer\\\\nW0203 07:10:26.530572 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 07:10:26.530764 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 07:10:26.531581 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3074372781/tls.crt::/tmp/serving-cert-3074372781/tls.key\\\\\\\"\\\\nF0203 07:10:26.771413 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" 
certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.274513 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.286142 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.295781 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.306594 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-po
d-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.318241 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.338574 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.350693 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 03 07:10:44 crc kubenswrapper[4708]: W0203 07:10:44.351893 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-6206e031e3dc400e5a20de1007ed944123bb7b68ae5b71b30ed7b37b35dd0afb WatchSource:0}: Error finding container 6206e031e3dc400e5a20de1007ed944123bb7b68ae5b71b30ed7b37b35dd0afb: Status 404 returned error can't find the container with id 6206e031e3dc400e5a20de1007ed944123bb7b68ae5b71b30ed7b37b35dd0afb Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.365706 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.495554 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-xnhzd"] Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.495923 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-xnhzd" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.496141 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-f2fzr"] Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.496714 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-f2fzr" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.500930 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.501248 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.501410 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.501677 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.501911 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.502080 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.502227 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.502416 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.514629 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.525352 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.538296 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f2691b03a6c296f6d88d5d94a675c09993fa63e3a19d8adebb9115b0c245257\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:26Z\\\",\\\"message\\\":\\\"W0203 07:10:26.155247 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 
07:10:26.157445 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770102626 cert, and key in /tmp/serving-cert-3074372781/serving-signer.crt, /tmp/serving-cert-3074372781/serving-signer.key\\\\nI0203 07:10:26.527815 1 observer_polling.go:159] Starting file observer\\\\nW0203 07:10:26.530572 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 07:10:26.530764 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 07:10:26.531581 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3074372781/tls.crt::/tmp/serving-cert-3074372781/tls.key\\\\\\\"\\\\nF0203 07:10:26.771413 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" 
certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.549268 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.559798 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.575163 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.575579 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.575655 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.575691 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-cnibin\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.575717 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-system-cni-dir\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.575733 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7cedfe91-d1c3-4c56-9aac-797ecade9468-cni-binary-copy\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.575750 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-multus-conf-dir\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: E0203 07:10:44.575772 4708 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Feb 03 07:10:44 crc kubenswrapper[4708]: E0203 07:10:44.575879 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-03 07:10:45.575860533 +0000 UTC m=+24.557807340 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.575803 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-host-var-lib-cni-multus\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.575935 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vr9k9\" (UniqueName: \"kubernetes.io/projected/50972c31-a8a8-4de2-a88e-9b77c33a1b0b-kube-api-access-vr9k9\") pod \"node-resolver-xnhzd\" (UID: \"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\") " pod="openshift-dns/node-resolver-xnhzd"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.575958 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-host-run-k8s-cni-cncf-io\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.575975 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-host-run-netns\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.576002 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-os-release\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.576017 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-host-var-lib-cni-bin\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.576030 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-host-var-lib-kubelet\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.576048 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-host-run-multus-certs\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.576062 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/50972c31-a8a8-4de2-a88e-9b77c33a1b0b-hosts-file\") pod \"node-resolver-xnhzd\" (UID: \"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\") " pod="openshift-dns/node-resolver-xnhzd"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.576093 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/7cedfe91-d1c3-4c56-9aac-797ecade9468-multus-daemon-config\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.576110 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-etc-kubernetes\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.576176 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7g6k\" (UniqueName: \"kubernetes.io/projected/7cedfe91-d1c3-4c56-9aac-797ecade9468-kube-api-access-w7g6k\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.576222 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.576248 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-multus-cni-dir\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.576271 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-multus-socket-dir-parent\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.576313 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-hostroot\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: E0203 07:10:44.576397 4708 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Feb 03 07:10:44 crc kubenswrapper[4708]: E0203 07:10:44.576448 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-03 07:10:45.576432259 +0000 UTC m=+24.558379066 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Feb 03 07:10:44 crc kubenswrapper[4708]: E0203 07:10:44.576763 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:10:45.576747867 +0000 UTC m=+24.558694694 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.585090 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.600614 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.612158 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.628778 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.641263 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.656682 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f2691b03a6c296f6d88d5d94a675c09993fa63e3a19d8adebb9115b0c245257\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:26Z\\\",\\\"message\\\":\\\"W0203 07:10:26.155247 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 07:10:26.157445 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770102626 cert, and key in /tmp/serving-cert-3074372781/serving-signer.crt, /tmp/serving-cert-3074372781/serving-signer.key\\\\nI0203 07:10:26.527815 1 observer_polling.go:159] Starting file observer\\\\nW0203 07:10:26.530572 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 07:10:26.530764 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 07:10:26.531581 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3074372781/tls.crt::/tmp/serving-cert-3074372781/tls.key\\\\\\\"\\\\nF0203 07:10:26.771413 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.668008 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.677396 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-host-run-k8s-cni-cncf-io\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.677450 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-host-run-netns\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.677476 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vr9k9\" (UniqueName: \"kubernetes.io/projected/50972c31-a8a8-4de2-a88e-9b77c33a1b0b-kube-api-access-vr9k9\") pod \"node-resolver-xnhzd\" (UID: \"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\") " pod="openshift-dns/node-resolver-xnhzd"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.677499 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.677520 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-os-release\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.677537 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-host-var-lib-cni-bin\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.677554 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-host-var-lib-kubelet\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.677571 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/50972c31-a8a8-4de2-a88e-9b77c33a1b0b-hosts-file\") pod \"node-resolver-xnhzd\" (UID: \"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\") " pod="openshift-dns/node-resolver-xnhzd"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.677589 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-host-run-multus-certs\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.677607 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/7cedfe91-d1c3-4c56-9aac-797ecade9468-multus-daemon-config\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.677621 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-etc-kubernetes\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.677622 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-host-run-k8s-cni-cncf-io\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.677640 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7g6k\" (UniqueName: \"kubernetes.io/projected/7cedfe91-d1c3-4c56-9aac-797ecade9468-kube-api-access-w7g6k\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.677838 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-multus-cni-dir\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.677845 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-host-var-lib-kubelet\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.677931 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-host-run-multus-certs\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.678006 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-etc-kubernetes\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.677857 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-host-run-netns\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.678091 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-host-var-lib-cni-bin\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.677866 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-multus-socket-dir-parent\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.678149 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-os-release\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.678159 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-hostroot\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.678113 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-multus-socket-dir-parent\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: E0203 07:10:44.678117 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.678206 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-cnibin\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.678199 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/50972c31-a8a8-4de2-a88e-9b77c33a1b0b-hosts-file\") pod \"node-resolver-xnhzd\" (UID: \"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\") " pod="openshift-dns/node-resolver-xnhzd"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.678231 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 03 07:10:44 crc kubenswrapper[4708]: E0203 07:10:44.678243 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.678257 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-system-cni-dir\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: E0203 07:10:44.678270 4708 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.678281 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7cedfe91-d1c3-4c56-9aac-797ecade9468-cni-binary-copy\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.678302 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-multus-conf-dir\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.678304 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-cnibin\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.678312 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-multus-cni-dir\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.678351 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-system-cni-dir\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.678252 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-hostroot\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.678337 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-multus-conf-dir\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: E0203 07:10:44.678346 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Feb 03 07:10:44 crc kubenswrapper[4708]: E0203 07:10:44.678461 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Feb 03 07:10:44 crc kubenswrapper[4708]: E0203 07:10:44.678487 4708 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 03 07:10:44 crc kubenswrapper[4708]: E0203 07:10:44.678359 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-03 07:10:45.678318873 +0000 UTC m=+24.660265680 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 03 07:10:44 crc kubenswrapper[4708]: E0203 07:10:44.678561 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-03 07:10:45.678539189 +0000 UTC m=+24.660486176 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.678592 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-host-var-lib-cni-multus\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.678559 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/7cedfe91-d1c3-4c56-9aac-797ecade9468-host-var-lib-cni-multus\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.678705 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/7cedfe91-d1c3-4c56-9aac-797ecade9468-multus-daemon-config\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.679038 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7cedfe91-d1c3-4c56-9aac-797ecade9468-cni-binary-copy\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.681837 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.698594 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.704418 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vr9k9\" (UniqueName: \"kubernetes.io/projected/50972c31-a8a8-4de2-a88e-9b77c33a1b0b-kube-api-access-vr9k9\") pod \"node-resolver-xnhzd\" (UID: \"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\") " pod="openshift-dns/node-resolver-xnhzd" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.706697 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7g6k\" (UniqueName: \"kubernetes.io/projected/7cedfe91-d1c3-4c56-9aac-797ecade9468-kube-api-access-w7g6k\") pod \"multus-f2fzr\" (UID: \"7cedfe91-d1c3-4c56-9aac-797ecade9468\") " pod="openshift-multus/multus-f2fzr" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.716891 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.728654 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.746293 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.759377 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.815062 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-xnhzd" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.821673 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-f2fzr" Feb 03 07:10:44 crc kubenswrapper[4708]: W0203 07:10:44.829514 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod50972c31_a8a8_4de2_a88e_9b77c33a1b0b.slice/crio-ba0d5a263821bd3e1364c394e505a8763d863e541eb71022e7a147d7573feca2 WatchSource:0}: Error finding container ba0d5a263821bd3e1364c394e505a8763d863e541eb71022e7a147d7573feca2: Status 404 returned error can't find the container with id ba0d5a263821bd3e1364c394e505a8763d863e541eb71022e7a147d7573feca2 Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.888442 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-r94bn"] Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.888865 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.891097 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.892019 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.892110 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.892194 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.892110 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.892780 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-fj5fc"] Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.893659 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.895739 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.895837 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.908948 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce
0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.937375 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.955610 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.972833 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://4f2691b03a6c296f6d88d5d94a675c09993fa63e3a19d8adebb9115b0c245257\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:26Z\\\",\\\"message\\\":\\\"W0203 07:10:26.155247 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 07:10:26.157445 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770102626 cert, and key in /tmp/serving-cert-3074372781/serving-signer.crt, /tmp/serving-cert-3074372781/serving-signer.key\\\\nI0203 07:10:26.527815 1 observer_polling.go:159] Starting file observer\\\\nW0203 07:10:26.530572 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 07:10:26.530764 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 07:10:26.531581 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3074372781/tls.crt::/tmp/serving-cert-3074372781/tls.key\\\\\\\"\\\\nF0203 07:10:26.771413 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.981579 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/9b5667f2-69df-408c-81af-c50c160ad409-os-release\") pod \"multus-additional-cni-plugins-fj5fc\" (UID: \"9b5667f2-69df-408c-81af-c50c160ad409\") " pod="openshift-multus/multus-additional-cni-plugins-fj5fc" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 
07:10:44.981613 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/9b5667f2-69df-408c-81af-c50c160ad409-tuning-conf-dir\") pod \"multus-additional-cni-plugins-fj5fc\" (UID: \"9b5667f2-69df-408c-81af-c50c160ad409\") " pod="openshift-multus/multus-additional-cni-plugins-fj5fc" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.981630 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/67498414-5132-496e-9638-189f5941ace0-mcd-auth-proxy-config\") pod \"machine-config-daemon-r94bn\" (UID: \"67498414-5132-496e-9638-189f5941ace0\") " pod="openshift-machine-config-operator/machine-config-daemon-r94bn" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.981652 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/67498414-5132-496e-9638-189f5941ace0-rootfs\") pod \"machine-config-daemon-r94bn\" (UID: \"67498414-5132-496e-9638-189f5941ace0\") " pod="openshift-machine-config-operator/machine-config-daemon-r94bn" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.981680 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9kx9t\" (UniqueName: \"kubernetes.io/projected/9b5667f2-69df-408c-81af-c50c160ad409-kube-api-access-9kx9t\") pod \"multus-additional-cni-plugins-fj5fc\" (UID: \"9b5667f2-69df-408c-81af-c50c160ad409\") " pod="openshift-multus/multus-additional-cni-plugins-fj5fc" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.981785 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/9b5667f2-69df-408c-81af-c50c160ad409-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-fj5fc\" (UID: \"9b5667f2-69df-408c-81af-c50c160ad409\") " pod="openshift-multus/multus-additional-cni-plugins-fj5fc" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.981879 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82zhj\" (UniqueName: \"kubernetes.io/projected/67498414-5132-496e-9638-189f5941ace0-kube-api-access-82zhj\") pod \"machine-config-daemon-r94bn\" (UID: \"67498414-5132-496e-9638-189f5941ace0\") " pod="openshift-machine-config-operator/machine-config-daemon-r94bn" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.981942 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9b5667f2-69df-408c-81af-c50c160ad409-system-cni-dir\") pod \"multus-additional-cni-plugins-fj5fc\" (UID: \"9b5667f2-69df-408c-81af-c50c160ad409\") " pod="openshift-multus/multus-additional-cni-plugins-fj5fc" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.981977 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/67498414-5132-496e-9638-189f5941ace0-proxy-tls\") pod \"machine-config-daemon-r94bn\" (UID: \"67498414-5132-496e-9638-189f5941ace0\") " pod="openshift-machine-config-operator/machine-config-daemon-r94bn" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.982006 4708 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/9b5667f2-69df-408c-81af-c50c160ad409-cni-binary-copy\") pod \"multus-additional-cni-plugins-fj5fc\" (UID: \"9b5667f2-69df-408c-81af-c50c160ad409\") " pod="openshift-multus/multus-additional-cni-plugins-fj5fc" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.982052 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/9b5667f2-69df-408c-81af-c50c160ad409-cnibin\") pod \"multus-additional-cni-plugins-fj5fc\" (UID: \"9b5667f2-69df-408c-81af-c50c160ad409\") " pod="openshift-multus/multus-additional-cni-plugins-fj5fc" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.986935 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:44 crc kubenswrapper[4708]: I0203 07:10:44.999248 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.009598 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:45 crc 
kubenswrapper[4708]: I0203 07:10:45.026422 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.038648 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.051859 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.063009 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.073165 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 18:40:15.333272392 +0000 UTC Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.074947 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f2691b03a6c296f6d88d5d94a675c09993fa63e3a19d8adebb9115b0c245257\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:26Z\\\",\\\"message\\\":\\\"W0203 07:10:26.155247 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 
07:10:26.157445 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770102626 cert, and key in /tmp/serving-cert-3074372781/serving-signer.crt, /tmp/serving-cert-3074372781/serving-signer.key\\\\nI0203 07:10:26.527815 1 observer_polling.go:159] Starting file observer\\\\nW0203 07:10:26.530572 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 07:10:26.530764 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 07:10:26.531581 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3074372781/tls.crt::/tmp/serving-cert-3074372781/tls.key\\\\\\\"\\\\nF0203 07:10:26.771413 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" 
certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.082973 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/9b5667f2-69df-408c-81af-c50c160ad409-cni-binary-copy\") pod \"multus-additional-cni-plugins-fj5fc\" (UID: \"9b5667f2-69df-408c-81af-c50c160ad409\") " pod="openshift-multus/multus-additional-cni-plugins-fj5fc" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.083048 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/9b5667f2-69df-408c-81af-c50c160ad409-cnibin\") pod 
\"multus-additional-cni-plugins-fj5fc\" (UID: \"9b5667f2-69df-408c-81af-c50c160ad409\") " pod="openshift-multus/multus-additional-cni-plugins-fj5fc" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.083071 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/67498414-5132-496e-9638-189f5941ace0-mcd-auth-proxy-config\") pod \"machine-config-daemon-r94bn\" (UID: \"67498414-5132-496e-9638-189f5941ace0\") " pod="openshift-machine-config-operator/machine-config-daemon-r94bn" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.083094 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/9b5667f2-69df-408c-81af-c50c160ad409-os-release\") pod \"multus-additional-cni-plugins-fj5fc\" (UID: \"9b5667f2-69df-408c-81af-c50c160ad409\") " pod="openshift-multus/multus-additional-cni-plugins-fj5fc" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.083110 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/9b5667f2-69df-408c-81af-c50c160ad409-tuning-conf-dir\") pod \"multus-additional-cni-plugins-fj5fc\" (UID: \"9b5667f2-69df-408c-81af-c50c160ad409\") " pod="openshift-multus/multus-additional-cni-plugins-fj5fc" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.083128 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/67498414-5132-496e-9638-189f5941ace0-rootfs\") pod \"machine-config-daemon-r94bn\" (UID: \"67498414-5132-496e-9638-189f5941ace0\") " pod="openshift-machine-config-operator/machine-config-daemon-r94bn" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.083147 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9kx9t\" (UniqueName: \"kubernetes.io/projected/9b5667f2-69df-408c-81af-c50c160ad409-kube-api-access-9kx9t\") pod \"multus-additional-cni-plugins-fj5fc\" (UID: \"9b5667f2-69df-408c-81af-c50c160ad409\") " pod="openshift-multus/multus-additional-cni-plugins-fj5fc" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.083174 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/9b5667f2-69df-408c-81af-c50c160ad409-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-fj5fc\" (UID: \"9b5667f2-69df-408c-81af-c50c160ad409\") " pod="openshift-multus/multus-additional-cni-plugins-fj5fc" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.083206 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82zhj\" (UniqueName: \"kubernetes.io/projected/67498414-5132-496e-9638-189f5941ace0-kube-api-access-82zhj\") pod \"machine-config-daemon-r94bn\" (UID: \"67498414-5132-496e-9638-189f5941ace0\") " pod="openshift-machine-config-operator/machine-config-daemon-r94bn" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.083236 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/67498414-5132-496e-9638-189f5941ace0-proxy-tls\") pod \"machine-config-daemon-r94bn\" (UID: \"67498414-5132-496e-9638-189f5941ace0\") " pod="openshift-machine-config-operator/machine-config-daemon-r94bn" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.083256 4708 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9b5667f2-69df-408c-81af-c50c160ad409-system-cni-dir\") pod \"multus-additional-cni-plugins-fj5fc\" (UID: \"9b5667f2-69df-408c-81af-c50c160ad409\") " pod="openshift-multus/multus-additional-cni-plugins-fj5fc" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.083315 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9b5667f2-69df-408c-81af-c50c160ad409-system-cni-dir\") pod \"multus-additional-cni-plugins-fj5fc\" (UID: \"9b5667f2-69df-408c-81af-c50c160ad409\") " pod="openshift-multus/multus-additional-cni-plugins-fj5fc" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.083966 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/67498414-5132-496e-9638-189f5941ace0-rootfs\") pod \"machine-config-daemon-r94bn\" (UID: \"67498414-5132-496e-9638-189f5941ace0\") " pod="openshift-machine-config-operator/machine-config-daemon-r94bn" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.084047 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/9b5667f2-69df-408c-81af-c50c160ad409-cni-binary-copy\") pod \"multus-additional-cni-plugins-fj5fc\" (UID: \"9b5667f2-69df-408c-81af-c50c160ad409\") " pod="openshift-multus/multus-additional-cni-plugins-fj5fc" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.084059 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/9b5667f2-69df-408c-81af-c50c160ad409-cnibin\") pod \"multus-additional-cni-plugins-fj5fc\" (UID: \"9b5667f2-69df-408c-81af-c50c160ad409\") " pod="openshift-multus/multus-additional-cni-plugins-fj5fc" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.084779 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/67498414-5132-496e-9638-189f5941ace0-mcd-auth-proxy-config\") pod \"machine-config-daemon-r94bn\" (UID: \"67498414-5132-496e-9638-189f5941ace0\") " pod="openshift-machine-config-operator/machine-config-daemon-r94bn" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.084870 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/9b5667f2-69df-408c-81af-c50c160ad409-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-fj5fc\" (UID: \"9b5667f2-69df-408c-81af-c50c160ad409\") " pod="openshift-multus/multus-additional-cni-plugins-fj5fc" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.085201 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/9b5667f2-69df-408c-81af-c50c160ad409-os-release\") pod \"multus-additional-cni-plugins-fj5fc\" (UID: \"9b5667f2-69df-408c-81af-c50c160ad409\") " pod="openshift-multus/multus-additional-cni-plugins-fj5fc" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.085261 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/9b5667f2-69df-408c-81af-c50c160ad409-tuning-conf-dir\") pod \"multus-additional-cni-plugins-fj5fc\" (UID: \"9b5667f2-69df-408c-81af-c50c160ad409\") " pod="openshift-multus/multus-additional-cni-plugins-fj5fc" Feb 03 07:10:45 crc 
kubenswrapper[4708]: I0203 07:10:45.087029 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.092352 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/67498414-5132-496e-9638-189f5941ace0-proxy-tls\") pod \"machine-config-daemon-r94bn\" (UID: \"67498414-5132-496e-9638-189f5941ace0\") " pod="openshift-machine-config-operator/machine-config-daemon-r94bn" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.092607 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:10:45 crc kubenswrapper[4708]: E0203 07:10:45.092762 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.101139 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82zhj\" (UniqueName: \"kubernetes.io/projected/67498414-5132-496e-9638-189f5941ace0-kube-api-access-82zhj\") pod \"machine-config-daemon-r94bn\" (UID: \"67498414-5132-496e-9638-189f5941ace0\") " pod="openshift-machine-config-operator/machine-config-daemon-r94bn" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.101492 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.103260 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9kx9t\" (UniqueName: \"kubernetes.io/projected/9b5667f2-69df-408c-81af-c50c160ad409-kube-api-access-9kx9t\") pod \"multus-additional-cni-plugins-fj5fc\" (UID: \"9b5667f2-69df-408c-81af-c50c160ad409\") " pod="openshift-multus/multus-additional-cni-plugins-fj5fc" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.114182 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.124292 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.135206 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.145696 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.156276 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.168393 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.179304 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.190679 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.201858 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.219074 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.227424 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" Feb 03 07:10:45 crc kubenswrapper[4708]: W0203 07:10:45.231994 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod67498414_5132_496e_9638_189f5941ace0.slice/crio-471e7b55a32b232123fa3bab329341407fba91c8999b4bb5263a923051272ec8 WatchSource:0}: Error finding container 471e7b55a32b232123fa3bab329341407fba91c8999b4bb5263a923051272ec8: Status 404 returned error can't find the container with id 471e7b55a32b232123fa3bab329341407fba91c8999b4bb5263a923051272ec8 Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.233264 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"fd4da7f84d3feb5d8190293197dc9ecaf3f3a9f7deff41ea0da938f704a2e6a0"} Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.235056 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-f2fzr" event={"ID":"7cedfe91-d1c3-4c56-9aac-797ecade9468","Type":"ContainerStarted","Data":"5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd"} Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.235479 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-f2fzr" event={"ID":"7cedfe91-d1c3-4c56-9aac-797ecade9468","Type":"ContainerStarted","Data":"c2da5c122ba37427e0c2d99c3588ec3f2fddfae1c4f5f8b69afa8609bdbdbb72"} Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.236889 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-xnhzd" event={"ID":"50972c31-a8a8-4de2-a88e-9b77c33a1b0b","Type":"ContainerStarted","Data":"d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388"} Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.236916 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-xnhzd" event={"ID":"50972c31-a8a8-4de2-a88e-9b77c33a1b0b","Type":"ContainerStarted","Data":"ba0d5a263821bd3e1364c394e505a8763d863e541eb71022e7a147d7573feca2"} Feb 03 07:10:45 crc kubenswrapper[4708]: W0203 07:10:45.241552 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9b5667f2_69df_408c_81af_c50c160ad409.slice/crio-8b885996a9bd3ab82d2ddc50ee2536b63d0026b36717b92f8f02c5f4749d6513 WatchSource:0}: Error finding container 8b885996a9bd3ab82d2ddc50ee2536b63d0026b36717b92f8f02c5f4749d6513: Status 404 returned error can't find the container with id 8b885996a9bd3ab82d2ddc50ee2536b63d0026b36717b92f8f02c5f4749d6513 Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.241767 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0"} Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.242063 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d"} Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.242079 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"899d7f160dee01023fed3e9097b9ded2be477ac1a19abc34e78e87bbc0970599"} Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.247636 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.252473 4708 scope.go:117] "RemoveContainer" containerID="79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802" Feb 03 07:10:45 crc kubenswrapper[4708]: E0203 07:10:45.252658 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.253595 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca"} Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.253659 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"6206e031e3dc400e5a20de1007ed944123bb7b68ae5b71b30ed7b37b35dd0afb"} Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.259873 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:45Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.266128 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-2sfqf"] Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.267004 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.269277 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.269625 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.269796 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.274204 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\
\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:45Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.276081 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.298205 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.317214 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.337869 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.385563 4708 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:45Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.386875 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-run-openvswitch\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.386924 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-run-ovn-kubernetes\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.386960 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: 
\"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-log-socket\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.387047 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-run-systemd\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.387078 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b0d14461-efec-4909-82de-2cce585892a4-ovnkube-script-lib\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.387105 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-run-ovn\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.387127 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b0d14461-efec-4909-82de-2cce585892a4-ovnkube-config\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.387148 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-run-netns\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.387206 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-kubelet\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.387293 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-var-lib-openvswitch\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.387366 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-cni-bin\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.387401 4708 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b0d14461-efec-4909-82de-2cce585892a4-ovn-node-metrics-cert\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.387495 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-slash\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.387592 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-systemd-units\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.387641 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-etc-openvswitch\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.387672 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b0d14461-efec-4909-82de-2cce585892a4-env-overrides\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.387702 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-node-log\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.387761 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-cni-netd\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.387857 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qtfmh\" (UniqueName: \"kubernetes.io/projected/b0d14461-efec-4909-82de-2cce585892a4-kube-api-access-qtfmh\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.387900 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.426169 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f2691b03a6c296f6d88d5d94a675c09993fa63e3a19d8adebb9115b0c245257\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:26Z\\\",\\\"message\\\":\\\"W0203 07:10:26.155247 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0203 07:10:26.157445 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770102626 cert, and key in /tmp/serving-cert-3074372781/serving-signer.crt, /tmp/serving-cert-3074372781/serving-signer.key\\\\nI0203 07:10:26.527815 1 observer_polling.go:159] Starting file observer\\\\nW0203 07:10:26.530572 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0203 07:10:26.530764 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0203 07:10:26.531581 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3074372781/tls.crt::/tmp/serving-cert-3074372781/tls.key\\\\\\\"\\\\nF0203 07:10:26.771413 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for 
RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:45Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.469514 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:45Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.489251 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-systemd-units\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.489405 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-etc-openvswitch\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.489484 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b0d14461-efec-4909-82de-2cce585892a4-env-overrides\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.489556 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-node-log\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.489630 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-cni-netd\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.489706 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtfmh\" (UniqueName: \"kubernetes.io/projected/b0d14461-efec-4909-82de-2cce585892a4-kube-api-access-qtfmh\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.489790 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-var-lib-cni-networks-ovn-kubernetes\") pod 
\"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.489893 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-run-openvswitch\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.489963 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-node-log\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.489541 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-systemd-units\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.489984 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-run-ovn-kubernetes\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.490075 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-log-socket\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.489598 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-etc-openvswitch\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.490109 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-run-systemd\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.490144 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b0d14461-efec-4909-82de-2cce585892a4-ovnkube-script-lib\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.490171 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-log-socket\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 
07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.490199 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-run-ovn\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.490226 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-run-openvswitch\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.490237 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b0d14461-efec-4909-82de-2cce585892a4-ovnkube-config\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.490275 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-run-netns\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.489932 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-cni-netd\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.490326 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-cni-bin\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.490341 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-run-ovn\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.490360 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b0d14461-efec-4909-82de-2cce585892a4-ovn-node-metrics-cert\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.490388 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-run-systemd\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.490393 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" 
(UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-kubelet\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.490432 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-var-lib-openvswitch\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.490466 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-slash\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.490535 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-slash\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.490584 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-cni-bin\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.490395 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b0d14461-efec-4909-82de-2cce585892a4-env-overrides\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.490649 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-run-ovn-kubernetes\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.490754 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-kubelet\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.490757 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-var-lib-openvswitch\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.490780 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-run-netns\") pod \"ovnkube-node-2sfqf\" (UID: 
\"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.490275 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.491369 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b0d14461-efec-4909-82de-2cce585892a4-ovnkube-config\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.491645 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b0d14461-efec-4909-82de-2cce585892a4-ovnkube-script-lib\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.503417 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b0d14461-efec-4909-82de-2cce585892a4-ovn-node-metrics-cert\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.508119 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:45Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.536692 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qtfmh\" (UniqueName: \"kubernetes.io/projected/b0d14461-efec-4909-82de-2cce585892a4-kube-api-access-qtfmh\") pod \"ovnkube-node-2sfqf\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.565133 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:45Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.591655 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.591855 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.591928 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:10:45 crc kubenswrapper[4708]: E0203 07:10:45.592031 4708 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 03 07:10:45 crc kubenswrapper[4708]: E0203 07:10:45.592071 4708 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 03 07:10:45 crc kubenswrapper[4708]: E0203 07:10:45.592111 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf 
podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-03 07:10:47.592089099 +0000 UTC m=+26.574035906 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 03 07:10:45 crc kubenswrapper[4708]: E0203 07:10:45.592180 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-03 07:10:47.592155461 +0000 UTC m=+26.574102268 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 03 07:10:45 crc kubenswrapper[4708]: E0203 07:10:45.592247 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:10:47.592236203 +0000 UTC m=+26.574183010 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.605878 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:45Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.607537 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:45 crc kubenswrapper[4708]: W0203 07:10:45.619662 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb0d14461_efec_4909_82de_2cce585892a4.slice/crio-b7ba1fa84452193eec5442ff36fb3097b8a7724c6051aefe0ff59d7d03f1bad7 WatchSource:0}: Error finding container b7ba1fa84452193eec5442ff36fb3097b8a7724c6051aefe0ff59d7d03f1bad7: Status 404 returned error can't find the container with id b7ba1fa84452193eec5442ff36fb3097b8a7724c6051aefe0ff59d7d03f1bad7 Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.649604 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:45Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.689989 4708 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:45Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.693328 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.693387 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:10:45 crc kubenswrapper[4708]: E0203 07:10:45.693602 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 03 07:10:45 crc 
kubenswrapper[4708]: E0203 07:10:45.693644 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 03 07:10:45 crc kubenswrapper[4708]: E0203 07:10:45.693674 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 03 07:10:45 crc kubenswrapper[4708]: E0203 07:10:45.693691 4708 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 07:10:45 crc kubenswrapper[4708]: E0203 07:10:45.693652 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 03 07:10:45 crc kubenswrapper[4708]: E0203 07:10:45.693757 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-03 07:10:47.693730426 +0000 UTC m=+26.675677233 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 07:10:45 crc kubenswrapper[4708]: E0203 07:10:45.693766 4708 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 07:10:45 crc kubenswrapper[4708]: E0203 07:10:45.693857 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-03 07:10:47.693831248 +0000 UTC m=+26.675778235 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.727119 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:45Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.769824 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:45Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.814721 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:45Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.846449 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:45Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.884847 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:45Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.928427 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/v
ar/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:45Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:45 crc kubenswrapper[4708]: I0203 07:10:45.968263 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:45Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.012807 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.051789 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.073766 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-03 08:48:41.834295581 +0000 UTC Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.092914 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.092939 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:10:46 crc kubenswrapper[4708]: E0203 07:10:46.093087 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:10:46 crc kubenswrapper[4708]: E0203 07:10:46.093234 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.094174 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.097007 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.123917 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.166623 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.207679 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.245773 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.257947 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" event={"ID":"67498414-5132-496e-9638-189f5941ace0","Type":"ContainerStarted","Data":"eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab"} Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.258009 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" event={"ID":"67498414-5132-496e-9638-189f5941ace0","Type":"ContainerStarted","Data":"24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4"} Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.258023 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" event={"ID":"67498414-5132-496e-9638-189f5941ace0","Type":"ContainerStarted","Data":"471e7b55a32b232123fa3bab329341407fba91c8999b4bb5263a923051272ec8"} Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.259502 4708 generic.go:334] "Generic (PLEG): container finished" podID="b0d14461-efec-4909-82de-2cce585892a4" containerID="3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8" exitCode=0 Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.259553 4708 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" event={"ID":"b0d14461-efec-4909-82de-2cce585892a4","Type":"ContainerDied","Data":"3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8"} Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.259577 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" event={"ID":"b0d14461-efec-4909-82de-2cce585892a4","Type":"ContainerStarted","Data":"b7ba1fa84452193eec5442ff36fb3097b8a7724c6051aefe0ff59d7d03f1bad7"} Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.261599 4708 generic.go:334] "Generic (PLEG): container finished" podID="9b5667f2-69df-408c-81af-c50c160ad409" containerID="ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7" exitCode=0 Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.261757 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" event={"ID":"9b5667f2-69df-408c-81af-c50c160ad409","Type":"ContainerDied","Data":"ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7"} Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.262059 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" event={"ID":"9b5667f2-69df-408c-81af-c50c160ad409","Type":"ContainerStarted","Data":"8b885996a9bd3ab82d2ddc50ee2536b63d0026b36717b92f8f02c5f4749d6513"} Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.291588 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.329842 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.377599 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.405221 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.445635 4708 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.487933 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.527930 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.567149 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.608012 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.645455 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.688786 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/v
ar/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.730952 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:46Z 
is after 2025-08-24T17:21:41Z" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.768749 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-nr7n2"] Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.769141 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-nr7n2" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.770623 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-ope
rator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.777118 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.798307 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.818775 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.836466 4708 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.887008 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.904089 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3b38d3e3-ef31-43ed-88fe-a896d4e73c7d-host\") pod \"node-ca-nr7n2\" (UID: \"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\") " pod="openshift-image-registry/node-ca-nr7n2" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.904144 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3b38d3e3-ef31-43ed-88fe-a896d4e73c7d-serviceca\") pod \"node-ca-nr7n2\" (UID: \"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\") " pod="openshift-image-registry/node-ca-nr7n2" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.904311 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zxtm\" (UniqueName: \"kubernetes.io/projected/3b38d3e3-ef31-43ed-88fe-a896d4e73c7d-kube-api-access-8zxtm\") pod \"node-ca-nr7n2\" (UID: 
\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\") " pod="openshift-image-registry/node-ca-nr7n2" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.926507 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:46 crc kubenswrapper[4708]: I0203 07:10:46.970623 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.005204 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zxtm\" (UniqueName: \"kubernetes.io/projected/3b38d3e3-ef31-43ed-88fe-a896d4e73c7d-kube-api-access-8zxtm\") pod \"node-ca-nr7n2\" (UID: \"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\") " pod="openshift-image-registry/node-ca-nr7n2" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.005675 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3b38d3e3-ef31-43ed-88fe-a896d4e73c7d-host\") pod \"node-ca-nr7n2\" (UID: \"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\") " pod="openshift-image-registry/node-ca-nr7n2" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.005697 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3b38d3e3-ef31-43ed-88fe-a896d4e73c7d-serviceca\") pod \"node-ca-nr7n2\" (UID: \"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\") " 
pod="openshift-image-registry/node-ca-nr7n2" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.005874 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3b38d3e3-ef31-43ed-88fe-a896d4e73c7d-host\") pod \"node-ca-nr7n2\" (UID: \"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\") " pod="openshift-image-registry/node-ca-nr7n2" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.006599 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3b38d3e3-ef31-43ed-88fe-a896d4e73c7d-serviceca\") pod \"node-ca-nr7n2\" (UID: \"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\") " pod="openshift-image-registry/node-ca-nr7n2" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.008107 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\
\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:47Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.039930 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zxtm\" (UniqueName: \"kubernetes.io/projected/3b38d3e3-ef31-43ed-88fe-a896d4e73c7d-kube-api-access-8zxtm\") pod \"node-ca-nr7n2\" (UID: \"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\") " pod="openshift-image-registry/node-ca-nr7n2" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.066073 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:47Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.074483 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-03 08:20:28.27887625 +0000 UTC Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.092225 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:10:47 crc kubenswrapper[4708]: E0203 07:10:47.093000 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.111273 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:47Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.146091 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:47Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.186327 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:47Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.226924 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:47Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.267511 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:47Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.269444 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" event={"ID":"b0d14461-efec-4909-82de-2cce585892a4","Type":"ContainerStarted","Data":"7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53"} Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.269500 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" event={"ID":"b0d14461-efec-4909-82de-2cce585892a4","Type":"ContainerStarted","Data":"11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545"} Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.269511 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" event={"ID":"b0d14461-efec-4909-82de-2cce585892a4","Type":"ContainerStarted","Data":"3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1"} Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.269522 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" event={"ID":"b0d14461-efec-4909-82de-2cce585892a4","Type":"ContainerStarted","Data":"5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff"} Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.276237 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-nr7n2" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.280287 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" event={"ID":"9b5667f2-69df-408c-81af-c50c160ad409","Type":"ContainerStarted","Data":"9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2"} Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.306059 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:47Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.345681 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/v
ar/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:47Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.391552 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:47Z 
is after 2025-08-24T17:21:41Z" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.426141 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed 
container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:47Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.467700 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:47Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.509194 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:47Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.547980 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:47Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.588544 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:47Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.612334 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.612455 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.612487 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:10:47 crc kubenswrapper[4708]: E0203 07:10:47.612569 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:10:51.612540347 +0000 UTC m=+30.594487194 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:10:47 crc kubenswrapper[4708]: E0203 07:10:47.612608 4708 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 03 07:10:47 crc kubenswrapper[4708]: E0203 07:10:47.612623 4708 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 03 07:10:47 crc kubenswrapper[4708]: E0203 07:10:47.612647 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-03 07:10:51.612639059 +0000 UTC m=+30.594585866 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 03 07:10:47 crc kubenswrapper[4708]: E0203 07:10:47.612739 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-03 07:10:51.612721261 +0000 UTC m=+30.594668068 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.625215 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:47Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.664549 4708 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:47Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.704669 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:47Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.713195 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.713243 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:10:47 crc kubenswrapper[4708]: E0203 07:10:47.713392 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 03 07:10:47 crc kubenswrapper[4708]: E0203 07:10:47.713438 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 03 07:10:47 crc kubenswrapper[4708]: E0203 07:10:47.713454 4708 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 07:10:47 crc kubenswrapper[4708]: E0203 07:10:47.713515 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-03 07:10:51.713495224 +0000 UTC m=+30.695442031 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 07:10:47 crc kubenswrapper[4708]: E0203 07:10:47.713392 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 03 07:10:47 crc kubenswrapper[4708]: E0203 07:10:47.713544 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 03 07:10:47 crc kubenswrapper[4708]: E0203 07:10:47.713556 4708 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 07:10:47 crc kubenswrapper[4708]: E0203 07:10:47.713590 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-03 07:10:51.713581197 +0000 UTC m=+30.695528004 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.750465 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:47Z 
is after 2025-08-24T17:21:41Z" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.785314 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:47Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.825856 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:47Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.865106 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:47Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.905053 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:47Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.943320 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:47Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.984701 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:47Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.988702 4708 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 07:10:47 crc kubenswrapper[4708]: I0203 07:10:47.989352 4708 scope.go:117] "RemoveContainer" containerID="79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802" Feb 03 07:10:47 crc kubenswrapper[4708]: E0203 07:10:47.989518 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.025584 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:48Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.074688 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 18:32:57.097471638 +0000 UTC Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.092912 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.093006 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:10:48 crc kubenswrapper[4708]: E0203 07:10:48.093063 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:10:48 crc kubenswrapper[4708]: E0203 07:10:48.093149 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.284665 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-nr7n2" event={"ID":"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d","Type":"ContainerStarted","Data":"a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a"} Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.285170 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-nr7n2" event={"ID":"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d","Type":"ContainerStarted","Data":"5b54ccff258b13720af269bfa69d253454ce0ada3db9c67a981b3ffa0a332126"} Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.286159 4708 generic.go:334] "Generic (PLEG): container finished" podID="9b5667f2-69df-408c-81af-c50c160ad409" containerID="9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2" exitCode=0 Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.286236 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" event={"ID":"9b5667f2-69df-408c-81af-c50c160ad409","Type":"ContainerDied","Data":"9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2"} Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.287304 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902"} Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.291159 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" event={"ID":"b0d14461-efec-4909-82de-2cce585892a4","Type":"ContainerStarted","Data":"ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59"} Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.291209 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" event={"ID":"b0d14461-efec-4909-82de-2cce585892a4","Type":"ContainerStarted","Data":"24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597"} Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.307763 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPat
h\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\
\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:48Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.323329 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:48Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.341287 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:48Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.356311 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:48Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.371149 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:48Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.389330 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:48Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.402957 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:48Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.416906 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:48Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.432394 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:48Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.446513 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:48Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.463109 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:48Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.506177 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-
access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:48Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.546983 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:48Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.585712 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:48Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.623642 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:48Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.669378 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:48Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.703613 4708 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:48Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.748226 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:48Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.787402 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:48Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.834443 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-
03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:48Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.867590 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:48Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.905734 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:48Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.944839 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:48Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:48 crc kubenswrapper[4708]: I0203 07:10:48.987138 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:48Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.032249 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:49Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.065716 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:49Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.075204 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 11:07:18.430511531 +0000 UTC Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.092912 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:10:49 crc kubenswrapper[4708]: E0203 07:10:49.093092 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.107836 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:49Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.147521 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:49Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.297793 4708 generic.go:334] "Generic (PLEG): container finished" podID="9b5667f2-69df-408c-81af-c50c160ad409" containerID="12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2" exitCode=0 Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.297839 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" event={"ID":"9b5667f2-69df-408c-81af-c50c160ad409","Type":"ContainerDied","Data":"12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2"} Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.315108 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:49Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.334073 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-02-03T07:10:49Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.348237 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}
],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:49Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.369979 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:49Z 
is after 2025-08-24T17:21:41Z" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.383935 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed 
container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:49Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.398941 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:49Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.427509 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:49Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.442653 4708 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.444672 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.444702 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.444712 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.444824 4708 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.464610 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:49Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.517514 4708 kubelet_node_status.go:115] "Node was previously registered" node="crc" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.517859 4708 kubelet_node_status.go:79] "Successfully registered node" node="crc" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.519062 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.519093 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.519105 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.519124 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.519139 4708 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:49Z","lastTransitionTime":"2026-02-03T07:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:49 crc kubenswrapper[4708]: E0203 07:10:49.534128 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:49Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.538939 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.538982 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.538990 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.539009 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.539019 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:49Z","lastTransitionTime":"2026-02-03T07:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.544717 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running
\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:49Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:49 crc kubenswrapper[4708]: E0203 07:10:49.550083 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:49Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.557032 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.557067 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.557077 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.557094 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.557108 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:49Z","lastTransitionTime":"2026-02-03T07:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:49 crc kubenswrapper[4708]: E0203 07:10:49.569794 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:49Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.573853 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.573889 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.573898 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.573911 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.573921 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:49Z","lastTransitionTime":"2026-02-03T07:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.584060 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-02-03T07:10:49Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:49 crc kubenswrapper[4708]: E0203 07:10:49.587946 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:49Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.591675 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.591712 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.591725 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.591746 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.591758 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:49Z","lastTransitionTime":"2026-02-03T07:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:49 crc kubenswrapper[4708]: E0203 07:10:49.603841 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:49Z is after 2025-08-24T17:21:41Z"
Feb 03 07:10:49 crc kubenswrapper[4708]: E0203 07:10:49.603953 4708 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.605532 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.605574 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.605587 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.605605 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.605616 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:49Z","lastTransitionTime":"2026-02-03T07:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.627043 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:49Z is after 2025-08-24T17:21:41Z"
Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.666034 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:49Z is after 2025-08-24T17:21:41Z"
Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.704146 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:49Z is after 2025-08-24T17:21:41Z"
Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.708115 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.708166 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.708181 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.708200 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.708211 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:49Z","lastTransitionTime":"2026-02-03T07:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.746148 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},
{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:49Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.810232 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.810325 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.810337 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.810356 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.810367 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:49Z","lastTransitionTime":"2026-02-03T07:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.913510 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.913558 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.913571 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.913590 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:49 crc kubenswrapper[4708]: I0203 07:10:49.913604 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:49Z","lastTransitionTime":"2026-02-03T07:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.017296 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.017343 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.017355 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.017374 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.017387 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:50Z","lastTransitionTime":"2026-02-03T07:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.076414 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-08 05:22:24.952947992 +0000 UTC Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.092981 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.093069 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:10:50 crc kubenswrapper[4708]: E0203 07:10:50.093181 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:10:50 crc kubenswrapper[4708]: E0203 07:10:50.093309 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.120270 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.120319 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.120337 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.120357 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.120374 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:50Z","lastTransitionTime":"2026-02-03T07:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.223330 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.223381 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.223396 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.223469 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.223483 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:50Z","lastTransitionTime":"2026-02-03T07:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.303361 4708 generic.go:334] "Generic (PLEG): container finished" podID="9b5667f2-69df-408c-81af-c50c160ad409" containerID="cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6" exitCode=0 Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.303413 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" event={"ID":"9b5667f2-69df-408c-81af-c50c160ad409","Type":"ContainerDied","Data":"cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6"} Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.308139 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" event={"ID":"b0d14461-efec-4909-82de-2cce585892a4","Type":"ContainerStarted","Data":"e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30"} Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.318422 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:50Z is after 2025-08-24T17:21:41Z"
Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.325915 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.325959 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.325970 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.325987 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.326000 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:50Z","lastTransitionTime":"2026-02-03T07:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.330374 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:50Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.343789 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:50Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.356456 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:50Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.371860 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:50Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.383154 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:50Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:50 crc 
kubenswrapper[4708]: I0203 07:10:50.394403 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:50Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.407276 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:50Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.421331 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:50Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.427875 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.427913 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.427925 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.427942 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.427955 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:50Z","lastTransitionTime":"2026-02-03T07:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.435442 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:
10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:50Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.452644 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:50Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.465881 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-02-03T07:10:50Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.478139 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}
],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:50Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.505749 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:50Z 
is after 2025-08-24T17:21:41Z" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.530638 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.530690 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.530701 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.530722 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.530735 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:50Z","lastTransitionTime":"2026-02-03T07:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.632959 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.633004 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.633013 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.633029 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.633040 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:50Z","lastTransitionTime":"2026-02-03T07:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.736340 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.736383 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.736401 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.736420 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.736435 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:50Z","lastTransitionTime":"2026-02-03T07:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.838725 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.838754 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.838764 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.838776 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.838785 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:50Z","lastTransitionTime":"2026-02-03T07:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.942110 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.942161 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.942173 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.942193 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:50 crc kubenswrapper[4708]: I0203 07:10:50.942206 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:50Z","lastTransitionTime":"2026-02-03T07:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.045730 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.045814 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.045828 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.045852 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.045865 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:51Z","lastTransitionTime":"2026-02-03T07:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.077360 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 17:44:45.991655548 +0000 UTC Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.092787 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:10:51 crc kubenswrapper[4708]: E0203 07:10:51.092976 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.149282 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.149334 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.149345 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.149364 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.149379 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:51Z","lastTransitionTime":"2026-02-03T07:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.252596 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.252637 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.252647 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.252660 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.252671 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:51Z","lastTransitionTime":"2026-02-03T07:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.321134 4708 generic.go:334] "Generic (PLEG): container finished" podID="9b5667f2-69df-408c-81af-c50c160ad409" containerID="2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f" exitCode=0 Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.321197 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" event={"ID":"9b5667f2-69df-408c-81af-c50c160ad409","Type":"ContainerDied","Data":"2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f"} Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.338991 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:51Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.355258 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:51Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.355406 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.355463 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.355481 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.355507 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.355526 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:51Z","lastTransitionTime":"2026-02-03T07:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.372246 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri
-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:51Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.386589 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recur
siveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:51Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.403926 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:51Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.423900 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:51Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.437524 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:51Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.453622 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b9
f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:51Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.457745 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.457794 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.457824 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.457847 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.457862 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:51Z","lastTransitionTime":"2026-02-03T07:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.468101 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:51Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.479544 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:51Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.495489 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"
},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:51Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.519450 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:51Z 
is after 2025-08-24T17:21:41Z" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.537173 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed 
container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:51Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.551374 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:51Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.560494 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.560531 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.560543 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.560565 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.560581 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:51Z","lastTransitionTime":"2026-02-03T07:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.659236 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.659342 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.659373 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:10:51 crc kubenswrapper[4708]: E0203 07:10:51.659485 4708 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 03 07:10:51 crc kubenswrapper[4708]: E0203 07:10:51.659506 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:10:59.659474759 +0000 UTC m=+38.641421566 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:10:51 crc kubenswrapper[4708]: E0203 07:10:51.659544 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-03 07:10:59.65953732 +0000 UTC m=+38.641484127 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 03 07:10:51 crc kubenswrapper[4708]: E0203 07:10:51.659508 4708 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 03 07:10:51 crc kubenswrapper[4708]: E0203 07:10:51.659739 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-03 07:10:59.659696175 +0000 UTC m=+38.641643052 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.663271 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.663316 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.663329 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.663347 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.663360 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:51Z","lastTransitionTime":"2026-02-03T07:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.760542 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.761117 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:10:51 crc kubenswrapper[4708]: E0203 07:10:51.760817 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 03 07:10:51 crc kubenswrapper[4708]: E0203 07:10:51.761316 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 03 07:10:51 crc kubenswrapper[4708]: E0203 07:10:51.761230 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 03 07:10:51 crc kubenswrapper[4708]: E0203 07:10:51.761387 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 03 07:10:51 crc kubenswrapper[4708]: E0203 07:10:51.761409 4708 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 07:10:51 crc kubenswrapper[4708]: E0203 07:10:51.761491 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-03 07:10:59.761465056 +0000 UTC m=+38.743411893 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 07:10:51 crc kubenswrapper[4708]: E0203 07:10:51.761995 4708 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 07:10:51 crc kubenswrapper[4708]: E0203 07:10:51.762178 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-03 07:10:59.762088613 +0000 UTC m=+38.744035420 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.768422 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.768488 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.768501 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.768521 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.768535 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:51Z","lastTransitionTime":"2026-02-03T07:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.783909 4708 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.872113 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.872175 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.872221 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.872255 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.872270 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:51Z","lastTransitionTime":"2026-02-03T07:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.975211 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.975250 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.975288 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.975305 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:51 crc kubenswrapper[4708]: I0203 07:10:51.975314 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:51Z","lastTransitionTime":"2026-02-03T07:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.077571 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 21:15:40.080971603 +0000 UTC Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.078256 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.078280 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.078291 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.078312 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.078324 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:52Z","lastTransitionTime":"2026-02-03T07:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.091982 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.092041 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:10:52 crc kubenswrapper[4708]: E0203 07:10:52.092119 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:10:52 crc kubenswrapper[4708]: E0203 07:10:52.092189 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.111942 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.128472 4708 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.144780 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b9
f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.158885 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.170349 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.181286 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.181330 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.181338 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.181352 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.181362 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:52Z","lastTransitionTime":"2026-02-03T07:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.182382 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.207689 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z 
is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.220459 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.233176 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.246171 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.262541 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.271659 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.284360 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.284400 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.284410 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.284426 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.284436 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:52Z","lastTransitionTime":"2026-02-03T07:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.286476 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri
-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.301928 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recur
siveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.329001 4708 generic.go:334] "Generic (PLEG): container finished" podID="9b5667f2-69df-408c-81af-c50c160ad409" containerID="e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7" exitCode=0 Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.329038 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" event={"ID":"9b5667f2-69df-408c-81af-c50c160ad409","Type":"ContainerDied","Data":"e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7"} Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.334957 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" event={"ID":"b0d14461-efec-4909-82de-2cce585892a4","Type":"ContainerStarted","Data":"86cf1f7a5b5beac6d04ff36c945903f08311946a4812e09877a4225f1d265928"} Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.335282 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.379226 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.389071 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.389138 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.389157 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.389219 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.389234 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:52Z","lastTransitionTime":"2026-02-03T07:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.393999 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.408116 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.412831 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.424133 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.439525 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.462925 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.479184 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.491939 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.491977 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.492009 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.492030 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.492044 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:52Z","lastTransitionTime":"2026-02-03T07:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.492767 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.554634 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.569432 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.581393 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.593622 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-
access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.596647 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.597428 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.597443 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.597482 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.597496 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:52Z","lastTransitionTime":"2026-02-03T07:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.609228 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.618540 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.627541 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.640069 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.649736 4708 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.662021 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.675173 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.691737 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.701174 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.701225 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.701234 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.701251 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.701263 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:52Z","lastTransitionTime":"2026-02-03T07:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.708923 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.722576 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.737380 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.751979 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.772402 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"re
cursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cr
i-o://5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86cf1f7a5b5beac6d04ff36c945903f08311946a4812e09877a4225f1d265928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"
mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.786842 4708 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5
ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.802265 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.803955 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.804012 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.804024 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.804042 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.804056 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:52Z","lastTransitionTime":"2026-02-03T07:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.821516 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed 
container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.907986 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.908054 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.908068 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.908092 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:52 crc kubenswrapper[4708]: I0203 07:10:52.908111 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:52Z","lastTransitionTime":"2026-02-03T07:10:52Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.011472 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.011530 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.011541 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.011557 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.011568 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:53Z","lastTransitionTime":"2026-02-03T07:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.078321 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 02:31:24.168581966 +0000 UTC Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.092664 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:10:53 crc kubenswrapper[4708]: E0203 07:10:53.092862 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.113634 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.113684 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.113698 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.113717 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.113731 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:53Z","lastTransitionTime":"2026-02-03T07:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.173171 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.215985 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.216034 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.216045 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.216063 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.216076 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:53Z","lastTransitionTime":"2026-02-03T07:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.319349 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.319383 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.319394 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.319409 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.319418 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:53Z","lastTransitionTime":"2026-02-03T07:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.343049 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" event={"ID":"9b5667f2-69df-408c-81af-c50c160ad409","Type":"ContainerStarted","Data":"7f1aa484aa9579cb96365d5a27132c7188c52d9b2d8e07a4a6367933f4305d5d"} Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.343471 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.359954 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:53Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.368242 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.375073 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:53Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.386424 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:53Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.397483 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:53Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.410990 4708 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:53Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.422176 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.422240 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.422256 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.422279 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.422298 4708 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:53Z","lastTransitionTime":"2026-02-03T07:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.428494 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:53Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.441487 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:53Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.457634 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f1aa484aa9579cb96365d5a27132c7188c52d9b2d8e07a4a6367933f4305d5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:53Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.470319 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:53Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.516196 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:53Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.524839 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.524877 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.524886 4708 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.524899 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.524910 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:53Z","lastTransitionTime":"2026-02-03T07:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.529347 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:53Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.545273 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:53Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.563632 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"re
cursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cr
i-o://5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86cf1f7a5b5beac6d04ff36c945903f08311946a4812e09877a4225f1d265928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"
mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:53Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.582453 4708 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5
ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:53Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.595735 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"h
ostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:53Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.608578 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:53Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.618847 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:53Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.627046 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.627089 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.627100 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.627117 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.627131 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:53Z","lastTransitionTime":"2026-02-03T07:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.631460 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri
-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:53Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.645219 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:53Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.657772 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:53Z is after 
2025-08-24T17:21:41Z" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.670614 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:53Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.686897 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f1aa484aa9579cb96365d5a27132c7188c52d9b2d8e07a4a6367933f4305d5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:53Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.709512 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86cf1f7a5b5beac6d04ff36c945903f08311946a4812e09877a4225f1d265928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:53Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.725087 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:53Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.730233 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.730298 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.730313 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.730339 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.730356 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:53Z","lastTransitionTime":"2026-02-03T07:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.741969 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:53Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.756768 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:53Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.772308 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:53Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.788922 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:53Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.833089 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.833136 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.833146 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.833161 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.833171 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:53Z","lastTransitionTime":"2026-02-03T07:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.935929 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.935994 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.936006 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.936027 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:53 crc kubenswrapper[4708]: I0203 07:10:53.936041 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:53Z","lastTransitionTime":"2026-02-03T07:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.038999 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.039046 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.039057 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.039072 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.039083 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:54Z","lastTransitionTime":"2026-02-03T07:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.079502 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 15:36:40.011659251 +0000 UTC Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.093210 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.093272 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:10:54 crc kubenswrapper[4708]: E0203 07:10:54.093413 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:10:54 crc kubenswrapper[4708]: E0203 07:10:54.093547 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.141367 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.141408 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.141416 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.141433 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.141445 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:54Z","lastTransitionTime":"2026-02-03T07:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.244578 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.244623 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.244632 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.244648 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.244660 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:54Z","lastTransitionTime":"2026-02-03T07:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.349172 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.349209 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.349224 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.349242 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.349258 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:54Z","lastTransitionTime":"2026-02-03T07:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.451496 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.451550 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.451562 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.451578 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.451588 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:54Z","lastTransitionTime":"2026-02-03T07:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.554599 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.554651 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.554662 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.554677 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.554689 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:54Z","lastTransitionTime":"2026-02-03T07:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.657270 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.657304 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.657313 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.657327 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.657336 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:54Z","lastTransitionTime":"2026-02-03T07:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.760278 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.760328 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.760340 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.760358 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.760370 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:54Z","lastTransitionTime":"2026-02-03T07:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.863075 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.863131 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.863149 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.863172 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.863189 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:54Z","lastTransitionTime":"2026-02-03T07:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.965739 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.965808 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.965822 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.965844 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:54 crc kubenswrapper[4708]: I0203 07:10:54.965857 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:54Z","lastTransitionTime":"2026-02-03T07:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.069162 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.069229 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.069248 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.069272 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.069290 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:55Z","lastTransitionTime":"2026-02-03T07:10:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.080436 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 00:37:08.694296962 +0000 UTC Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.092763 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:10:55 crc kubenswrapper[4708]: E0203 07:10:55.092928 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.172681 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.172726 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.172738 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.172755 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.172769 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:55Z","lastTransitionTime":"2026-02-03T07:10:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.275403 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.275465 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.275486 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.275511 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.275533 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:55Z","lastTransitionTime":"2026-02-03T07:10:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.352670 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2sfqf_b0d14461-efec-4909-82de-2cce585892a4/ovnkube-controller/0.log" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.355780 4708 generic.go:334] "Generic (PLEG): container finished" podID="b0d14461-efec-4909-82de-2cce585892a4" containerID="86cf1f7a5b5beac6d04ff36c945903f08311946a4812e09877a4225f1d265928" exitCode=1 Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.355851 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" event={"ID":"b0d14461-efec-4909-82de-2cce585892a4","Type":"ContainerDied","Data":"86cf1f7a5b5beac6d04ff36c945903f08311946a4812e09877a4225f1d265928"} Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.357077 4708 scope.go:117] "RemoveContainer" containerID="86cf1f7a5b5beac6d04ff36c945903f08311946a4812e09877a4225f1d265928" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.378458 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:55Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.378838 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.378883 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.378907 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.378926 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.378941 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:55Z","lastTransitionTime":"2026-02-03T07:10:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.391683 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:55Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.407264 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:55Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.420221 4708 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:55Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.437380 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:55Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.450840 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:55Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.464666 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:55Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.479433 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f1aa484aa9579cb96365d5a27132c7188c52d9b2d8e07a4a6367933f4305d5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:55Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.481284 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.481320 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:55 crc 
kubenswrapper[4708]: I0203 07:10:55.481335 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.481358 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.481412 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:55Z","lastTransitionTime":"2026-02-03T07:10:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.493968 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"con
tainerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:55Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.510616 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:55Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.525322 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:55Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.545520 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5003d
d7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86cf1f7a5b5beac6d04ff36c945903f08311946a4812e09877a4225f1d265928\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86cf1f7a5b5beac6d04ff36c945903f08311946a4812e09877a4225f1d265928\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:10:54Z\\\",\\\"message\\\":\\\"I0203 07:10:54.894878 5995 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0203 07:10:54.894896 5995 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0203 07:10:54.894921 5995 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0203 07:10:54.894929 5995 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0203 07:10:54.894960 5995 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0203 07:10:54.896598 5995 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0203 07:10:54.896613 5995 handler.go:208] Removed *v1.Node event handler 2\\\\nI0203 07:10:54.896630 5995 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0203 07:10:54.896648 5995 handler.go:208] Removed *v1.Node event handler 7\\\\nI0203 07:10:54.896624 5995 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0203 07:10:54.896644 5995 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0203 07:10:54.896718 5995 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0203 07:10:54.896761 5995 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0203 07:10:54.896833 5995 factory.go:656] Stopping watch factory\\\\nI0203 07:10:54.896887 5995 ovnkube.go:599] Stopped ovnkube\\\\nI0203 07:10:54.896843 5995 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0203 
07\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:55Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.558404 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:55Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.571133 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:55Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.584514 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.584579 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.584591 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.584616 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.584626 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:55Z","lastTransitionTime":"2026-02-03T07:10:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.688561 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.688608 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.688620 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.688637 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.688649 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:55Z","lastTransitionTime":"2026-02-03T07:10:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.791267 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.791304 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.791314 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.791326 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.791335 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:55Z","lastTransitionTime":"2026-02-03T07:10:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.893726 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.893776 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.893787 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.893819 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.893832 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:55Z","lastTransitionTime":"2026-02-03T07:10:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.996723 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.996788 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.996833 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.996856 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:55 crc kubenswrapper[4708]: I0203 07:10:55.996867 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:55Z","lastTransitionTime":"2026-02-03T07:10:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.081485 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 17:52:31.604892881 +0000 UTC Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.091930 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.092032 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:10:56 crc kubenswrapper[4708]: E0203 07:10:56.092100 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:10:56 crc kubenswrapper[4708]: E0203 07:10:56.092215 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.098413 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.098458 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.098471 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.098490 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.098505 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:56Z","lastTransitionTime":"2026-02-03T07:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.201710 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.201763 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.201771 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.201805 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.201817 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:56Z","lastTransitionTime":"2026-02-03T07:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.304990 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.305029 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.305040 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.305055 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.305068 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:56Z","lastTransitionTime":"2026-02-03T07:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.361139 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2sfqf_b0d14461-efec-4909-82de-2cce585892a4/ovnkube-controller/0.log" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.370848 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" event={"ID":"b0d14461-efec-4909-82de-2cce585892a4","Type":"ContainerStarted","Data":"d996b8cfac56b8d3d4a7d95b6203642a3c7ebbe119dfbc1c7c05a7c589a5b412"} Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.371247 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.384322 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:56Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.395329 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:56Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.405473 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:56Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.411845 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.411884 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.411897 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.411915 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.411926 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:56Z","lastTransitionTime":"2026-02-03T07:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.414070 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:56Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.425285 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:56Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.435000 4708 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:56Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.445847 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:56Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.457121 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:56Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.470215 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:56Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.493065 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f1aa484aa9579cb96365d5a27132c7188c52d9b2d8e07a4a6367933f4305d5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:56Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.508828 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:56Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.514541 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.514600 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.514611 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.514628 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.514642 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:56Z","lastTransitionTime":"2026-02-03T07:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.521397 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:56Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.532637 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:56Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.550120 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5003d
d7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d996b8cfac56b8d3d4a7d95b6203642a3c7ebbe119dfbc1c7c05a7c589a5b412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86cf1f7a5b5beac6d04ff36c945903f08311946a4812e09877a4225f1d265928\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:10:54Z\\\",\\\"message\\\":\\\"I0203 07:10:54.894878 5995 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0203 07:10:54.894896 5995 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0203 07:10:54.894921 5995 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0203 07:10:54.894929 5995 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0203 07:10:54.894960 5995 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0203 07:10:54.896598 5995 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0203 07:10:54.896613 5995 handler.go:208] Removed *v1.Node event handler 2\\\\nI0203 07:10:54.896630 5995 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0203 07:10:54.896648 5995 handler.go:208] Removed *v1.Node event handler 7\\\\nI0203 07:10:54.896624 5995 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0203 07:10:54.896644 5995 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0203 07:10:54.896718 5995 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0203 07:10:54.896761 5995 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0203 07:10:54.896833 5995 factory.go:656] Stopping watch factory\\\\nI0203 07:10:54.896887 5995 ovnkube.go:599] Stopped ovnkube\\\\nI0203 07:10:54.896843 5995 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0203 
07\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:56Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.617933 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.617973 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.617982 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.617997 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.618007 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:56Z","lastTransitionTime":"2026-02-03T07:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.720768 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.720817 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.720831 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.720847 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.720858 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:56Z","lastTransitionTime":"2026-02-03T07:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.824651 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.824721 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.824736 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.824755 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.824777 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:56Z","lastTransitionTime":"2026-02-03T07:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.927959 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.928012 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.928022 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.928039 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:56 crc kubenswrapper[4708]: I0203 07:10:56.928050 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:56Z","lastTransitionTime":"2026-02-03T07:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.031420 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.031484 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.031506 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.031538 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.031562 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:57Z","lastTransitionTime":"2026-02-03T07:10:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.082569 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 17:44:48.948871153 +0000 UTC Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.092311 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:10:57 crc kubenswrapper[4708]: E0203 07:10:57.092594 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.134901 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.134965 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.134977 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.134996 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.135009 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:57Z","lastTransitionTime":"2026-02-03T07:10:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.237710 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.237764 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.237777 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.237819 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.237831 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:57Z","lastTransitionTime":"2026-02-03T07:10:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.340574 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.340660 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.340685 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.340716 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.340738 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:57Z","lastTransitionTime":"2026-02-03T07:10:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.378906 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2sfqf_b0d14461-efec-4909-82de-2cce585892a4/ovnkube-controller/1.log" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.379873 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2sfqf_b0d14461-efec-4909-82de-2cce585892a4/ovnkube-controller/0.log" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.382614 4708 generic.go:334] "Generic (PLEG): container finished" podID="b0d14461-efec-4909-82de-2cce585892a4" containerID="d996b8cfac56b8d3d4a7d95b6203642a3c7ebbe119dfbc1c7c05a7c589a5b412" exitCode=1 Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.382667 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" event={"ID":"b0d14461-efec-4909-82de-2cce585892a4","Type":"ContainerDied","Data":"d996b8cfac56b8d3d4a7d95b6203642a3c7ebbe119dfbc1c7c05a7c589a5b412"} Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.382711 4708 scope.go:117] "RemoveContainer" containerID="86cf1f7a5b5beac6d04ff36c945903f08311946a4812e09877a4225f1d265928" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.383464 4708 scope.go:117] "RemoveContainer" containerID="d996b8cfac56b8d3d4a7d95b6203642a3c7ebbe119dfbc1c7c05a7c589a5b412" Feb 03 07:10:57 crc kubenswrapper[4708]: E0203 07:10:57.383658 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-2sfqf_openshift-ovn-kubernetes(b0d14461-efec-4909-82de-2cce585892a4)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" podUID="b0d14461-efec-4909-82de-2cce585892a4" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.405658 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:57Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.423155 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:57Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.439874 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:57Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.443839 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.443912 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.443928 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.443954 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.443967 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:57Z","lastTransitionTime":"2026-02-03T07:10:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.456411 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f1aa484aa9579cb96365d5a27132c7188c52d9b2d8e07a4a6367933f4305d5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:57Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.472344 4708 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5
ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:57Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.484737 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:57Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.504356 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:57Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.533752 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d996b8cfac56b8d3d4a7d95b6203642a3c7ebbe1
19dfbc1c7c05a7c589a5b412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86cf1f7a5b5beac6d04ff36c945903f08311946a4812e09877a4225f1d265928\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:10:54Z\\\",\\\"message\\\":\\\"I0203 07:10:54.894878 5995 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0203 07:10:54.894896 5995 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0203 07:10:54.894921 5995 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0203 07:10:54.894929 5995 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0203 07:10:54.894960 5995 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0203 07:10:54.896598 5995 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0203 07:10:54.896613 5995 handler.go:208] Removed *v1.Node event handler 2\\\\nI0203 07:10:54.896630 5995 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0203 07:10:54.896648 5995 handler.go:208] Removed *v1.Node event handler 7\\\\nI0203 07:10:54.896624 5995 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0203 07:10:54.896644 5995 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0203 07:10:54.896718 5995 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0203 07:10:54.896761 5995 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0203 07:10:54.896833 5995 factory.go:656] Stopping watch factory\\\\nI0203 07:10:54.896887 5995 ovnkube.go:599] Stopped ovnkube\\\\nI0203 07:10:54.896843 5995 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0203 07\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d996b8cfac56b8d3d4a7d95b6203642a3c7ebbe119dfbc1c7c05a7c589a5b412\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:10:56Z\\\",\\\"message\\\":\\\"rnetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355339 6132 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0203 07:10:56.355440 6132 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0203 07:10:56.355524 6132 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355621 6132 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355814 6132 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.355834 6132 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.356014 6132 
reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.356594 6132 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o:/
/3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:57Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.546537 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.546598 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.546608 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.546625 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.546635 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:57Z","lastTransitionTime":"2026-02-03T07:10:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.555657 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed 
container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:57Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.577428 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:57Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.598920 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:57Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.609749 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:57Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.623706 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:57Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.635791 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx"] Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.636344 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.638102 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:57Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.638469 4708 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.638681 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.649029 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.649107 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.649118 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.649134 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.649145 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:57Z","lastTransitionTime":"2026-02-03T07:10:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.654224 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:57Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.669996 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:57Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.682060 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:57Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.694100 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:57Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.707109 4708 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:57Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.723784 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:57Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.737819 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:57Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.751101 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.751139 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.751148 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.751161 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.751171 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:57Z","lastTransitionTime":"2026-02-03T07:10:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.755393 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f1aa484aa9579cb96365d5a27132c7188c52d9b2d8e07a4a6367933f4305d5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:57Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.759462 4708 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hhbv\" (UniqueName: \"kubernetes.io/projected/c85ceeba-ec54-4325-af45-7a9176cb62a8-kube-api-access-2hhbv\") pod \"ovnkube-control-plane-749d76644c-4f7fx\" (UID: \"c85ceeba-ec54-4325-af45-7a9176cb62a8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.759500 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c85ceeba-ec54-4325-af45-7a9176cb62a8-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-4f7fx\" (UID: \"c85ceeba-ec54-4325-af45-7a9176cb62a8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.759558 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c85ceeba-ec54-4325-af45-7a9176cb62a8-env-overrides\") pod \"ovnkube-control-plane-749d76644c-4f7fx\" (UID: \"c85ceeba-ec54-4325-af45-7a9176cb62a8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.759615 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c85ceeba-ec54-4325-af45-7a9176cb62a8-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-4f7fx\" (UID: \"c85ceeba-ec54-4325-af45-7a9176cb62a8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.769136 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c85ceeba-ec54-4325-af45-7a9176cb62a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4f7fx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:57Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.783753 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:57Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.798477 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:57Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.813701 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:57Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.826217 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:57Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.844697 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5003d
d7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d996b8cfac56b8d3d4a7d95b6203642a3c7ebbe119dfbc1c7c05a7c589a5b412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86cf1f7a5b5beac6d04ff36c945903f08311946a4812e09877a4225f1d265928\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:10:54Z\\\",\\\"message\\\":\\\"I0203 07:10:54.894878 5995 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0203 07:10:54.894896 5995 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0203 07:10:54.894921 5995 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0203 07:10:54.894929 5995 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0203 07:10:54.894960 5995 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0203 07:10:54.896598 5995 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0203 07:10:54.896613 5995 handler.go:208] Removed *v1.Node event handler 2\\\\nI0203 07:10:54.896630 5995 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0203 07:10:54.896648 5995 handler.go:208] Removed *v1.Node event handler 7\\\\nI0203 07:10:54.896624 5995 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0203 07:10:54.896644 5995 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0203 07:10:54.896718 5995 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0203 07:10:54.896761 5995 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0203 07:10:54.896833 5995 factory.go:656] Stopping watch factory\\\\nI0203 07:10:54.896887 5995 ovnkube.go:599] Stopped ovnkube\\\\nI0203 07:10:54.896843 5995 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0203 
07\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d996b8cfac56b8d3d4a7d95b6203642a3c7ebbe119dfbc1c7c05a7c589a5b412\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:10:56Z\\\",\\\"message\\\":\\\"rnetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355339 6132 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0203 07:10:56.355440 6132 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0203 07:10:56.355524 6132 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355621 6132 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355814 6132 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.355834 6132 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.356014 6132 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.356594 6132 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:57Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.853336 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.853376 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.853388 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.853404 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.853415 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:57Z","lastTransitionTime":"2026-02-03T07:10:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.859909 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:57Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.860033 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c85ceeba-ec54-4325-af45-7a9176cb62a8-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-4f7fx\" (UID: \"c85ceeba-ec54-4325-af45-7a9176cb62a8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.860421 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hhbv\" (UniqueName: \"kubernetes.io/projected/c85ceeba-ec54-4325-af45-7a9176cb62a8-kube-api-access-2hhbv\") pod \"ovnkube-control-plane-749d76644c-4f7fx\" (UID: \"c85ceeba-ec54-4325-af45-7a9176cb62a8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.860595 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c85ceeba-ec54-4325-af45-7a9176cb62a8-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-4f7fx\" (UID: \"c85ceeba-ec54-4325-af45-7a9176cb62a8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.860675 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c85ceeba-ec54-4325-af45-7a9176cb62a8-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-4f7fx\" (UID: \"c85ceeba-ec54-4325-af45-7a9176cb62a8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.861185 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/c85ceeba-ec54-4325-af45-7a9176cb62a8-env-overrides\") pod \"ovnkube-control-plane-749d76644c-4f7fx\" (UID: \"c85ceeba-ec54-4325-af45-7a9176cb62a8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.861596 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c85ceeba-ec54-4325-af45-7a9176cb62a8-env-overrides\") pod \"ovnkube-control-plane-749d76644c-4f7fx\" (UID: \"c85ceeba-ec54-4325-af45-7a9176cb62a8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.867072 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c85ceeba-ec54-4325-af45-7a9176cb62a8-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-4f7fx\" (UID: \"c85ceeba-ec54-4325-af45-7a9176cb62a8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.877110 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hhbv\" (UniqueName: \"kubernetes.io/projected/c85ceeba-ec54-4325-af45-7a9176cb62a8-kube-api-access-2hhbv\") pod \"ovnkube-control-plane-749d76644c-4f7fx\" (UID: \"c85ceeba-ec54-4325-af45-7a9176cb62a8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.950117 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.955621 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.955648 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.955657 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.955672 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:57 crc kubenswrapper[4708]: I0203 07:10:57.955682 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:57Z","lastTransitionTime":"2026-02-03T07:10:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.058388 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.058435 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.058445 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.058459 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.058470 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:58Z","lastTransitionTime":"2026-02-03T07:10:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.083274 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 19:44:42.997324885 +0000 UTC Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.092652 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.092715 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:10:58 crc kubenswrapper[4708]: E0203 07:10:58.092826 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:10:58 crc kubenswrapper[4708]: E0203 07:10:58.092922 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.160743 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.160817 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.160830 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.160849 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.160863 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:58Z","lastTransitionTime":"2026-02-03T07:10:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.265747 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.265777 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.265813 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.265832 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.265843 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:58Z","lastTransitionTime":"2026-02-03T07:10:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.368196 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.368255 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.368272 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.368297 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.368314 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:58Z","lastTransitionTime":"2026-02-03T07:10:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.391844 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" event={"ID":"c85ceeba-ec54-4325-af45-7a9176cb62a8","Type":"ContainerStarted","Data":"74d6f26de36eecd83615f1e4f61bb111775073dff67ca10a94a65f436569de13"} Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.391902 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" event={"ID":"c85ceeba-ec54-4325-af45-7a9176cb62a8","Type":"ContainerStarted","Data":"8e632f23c70ccfc0d7a4bdc9f03ca71f0c29b7a1b446b9b1ee14ede18ee1682b"} Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.391917 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" event={"ID":"c85ceeba-ec54-4325-af45-7a9176cb62a8","Type":"ContainerStarted","Data":"7482c2ba90ce322d0741b352f5d6dd96a5131e4bff9be5606fd9ed7be02f3d54"} Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.393334 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2sfqf_b0d14461-efec-4909-82de-2cce585892a4/ovnkube-controller/1.log" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.396556 4708 scope.go:117] "RemoveContainer" containerID="d996b8cfac56b8d3d4a7d95b6203642a3c7ebbe119dfbc1c7c05a7c589a5b412" Feb 03 07:10:58 crc kubenswrapper[4708]: E0203 07:10:58.396718 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-2sfqf_openshift-ovn-kubernetes(b0d14461-efec-4909-82de-2cce585892a4)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" podUID="b0d14461-efec-4909-82de-2cce585892a4" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.414003 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:58Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.428080 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:58Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.439827 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:58Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.451259 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-
access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:58Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.468467 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:58Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.471372 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.471407 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.471416 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.471431 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.471441 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:58Z","lastTransitionTime":"2026-02-03T07:10:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.480164 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:58Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.495403 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f1aa484aa9579cb96365d5a27132c7188c52d9b2d8e07a4a6367933f4305d5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:58Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.506968 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c85ceeba-ec54-4325-af45-7a9176cb62a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e632f23c70ccfc0d7a4bdc9f03ca71f0c29b7a1b446b9b1ee14ede18ee1682b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://74d6f26de36eecd83615f1e4f61bb111775073dff67ca10a94a65f436569de13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4f7fx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:58Z is after 2025-08-24T17:21:41Z" Feb 03 
07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.523153 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:58Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.539274 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:58Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.550588 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:58Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.562472 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:58Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.573639 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.573687 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.573699 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.573716 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.573727 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:58Z","lastTransitionTime":"2026-02-03T07:10:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.583070 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d996b8cfac56b8d3d4a7d95b6203642a3c7ebbe1
19dfbc1c7c05a7c589a5b412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86cf1f7a5b5beac6d04ff36c945903f08311946a4812e09877a4225f1d265928\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:10:54Z\\\",\\\"message\\\":\\\"I0203 07:10:54.894878 5995 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0203 07:10:54.894896 5995 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0203 07:10:54.894921 5995 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0203 07:10:54.894929 5995 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0203 07:10:54.894960 5995 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0203 07:10:54.896598 5995 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0203 07:10:54.896613 5995 handler.go:208] Removed *v1.Node event handler 2\\\\nI0203 07:10:54.896630 5995 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0203 07:10:54.896648 5995 handler.go:208] Removed *v1.Node event handler 7\\\\nI0203 07:10:54.896624 5995 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0203 07:10:54.896644 5995 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0203 07:10:54.896718 5995 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0203 07:10:54.896761 5995 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0203 07:10:54.896833 5995 factory.go:656] Stopping watch factory\\\\nI0203 07:10:54.896887 5995 ovnkube.go:599] Stopped ovnkube\\\\nI0203 07:10:54.896843 5995 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0203 07\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d996b8cfac56b8d3d4a7d95b6203642a3c7ebbe119dfbc1c7c05a7c589a5b412\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:10:56Z\\\",\\\"message\\\":\\\"rnetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355339 6132 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0203 07:10:56.355440 6132 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0203 07:10:56.355524 6132 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355621 6132 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355814 6132 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.355834 6132 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.356014 6132 
reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.356594 6132 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o:/
/3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:58Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.596362 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:58Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.607138 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-02-03T07:10:58Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.619287 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:58Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.632780 4708 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:58Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.650373 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f1aa484aa9579cb96365d5a27132c7188c52d9b2d8e07a4a6367933f4305d5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:58Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.663568 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c85ceeba-ec54-4325-af45-7a9176cb62a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e632f23c70ccfc0d7a4bdc9f03ca71f0c29b7a1b446b9b1ee14ede18ee1682b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://74d6f26de36eecd83615f1e4f61bb111775073dff67ca10a94a65f436569de13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4f7fx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:58Z is after 2025-08-24T17:21:41Z" Feb 03 
07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.676183 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.676225 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.676233 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.676249 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.676259 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:58Z","lastTransitionTime":"2026-02-03T07:10:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.676825 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:58Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 
07:10:58.691238 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:58Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.704571 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:58Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.722627 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5003d
d7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d996b8cfac56b8d3d4a7d95b6203642a3c7ebbe119dfbc1c7c05a7c589a5b412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d996b8cfac56b8d3d4a7d95b6203642a3c7ebbe119dfbc1c7c05a7c589a5b412\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:10:56Z\\\",\\\"message\\\":\\\"rnetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355339 6132 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0203 07:10:56.355440 6132 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0203 07:10:56.355524 6132 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355621 6132 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355814 6132 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.355834 6132 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.356014 6132 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.356594 6132 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-2sfqf_openshift-ovn-kubernetes(b0d14461-efec-4909-82de-2cce585892a4)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:58Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.735546 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:58Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.754098 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:58Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.768995 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:58Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.778629 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.778673 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.778685 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.778701 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.778714 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:58Z","lastTransitionTime":"2026-02-03T07:10:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.783326 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:58Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.792941 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:58Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.804998 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:58Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.816363 4708 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:58Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.881820 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.881865 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.881877 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.881894 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.881906 4708 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:58Z","lastTransitionTime":"2026-02-03T07:10:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.984244 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.984276 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.984285 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.984299 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:58 crc kubenswrapper[4708]: I0203 07:10:58.984311 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:58Z","lastTransitionTime":"2026-02-03T07:10:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.083566 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 14:40:20.318845738 +0000 UTC Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.087385 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.087440 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.087448 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.087462 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.087472 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:59Z","lastTransitionTime":"2026-02-03T07:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.092851 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:10:59 crc kubenswrapper[4708]: E0203 07:10:59.092998 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.095789 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-6thl9"] Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.096316 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:10:59 crc kubenswrapper[4708]: E0203 07:10:59.096394 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.115179 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-reso
urces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:59Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.127517 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:59Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.142747 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:59Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.163143 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5003d
d7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d996b8cfac56b8d3d4a7d95b6203642a3c7ebbe119dfbc1c7c05a7c589a5b412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d996b8cfac56b8d3d4a7d95b6203642a3c7ebbe119dfbc1c7c05a7c589a5b412\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:10:56Z\\\",\\\"message\\\":\\\"rnetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355339 6132 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0203 07:10:56.355440 6132 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0203 07:10:56.355524 6132 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355621 6132 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355814 6132 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.355834 6132 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.356014 6132 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.356594 6132 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-2sfqf_openshift-ovn-kubernetes(b0d14461-efec-4909-82de-2cce585892a4)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:59Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.177157 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:59Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.190310 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.190352 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.190365 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.190381 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.190393 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:59Z","lastTransitionTime":"2026-02-03T07:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.191287 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:59Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.201612 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6thl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851add34-7566-4ed5-b70a-c7935eb26e4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6thl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:59Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.216405 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:59Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.231538 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:59Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.243195 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:59Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.254499 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:59Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:59 crc 
kubenswrapper[4708]: I0203 07:10:59.269227 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:59Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.272944 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/851add34-7566-4ed5-b70a-c7935eb26e4f-metrics-certs\") pod \"network-metrics-daemon-6thl9\" (UID: \"851add34-7566-4ed5-b70a-c7935eb26e4f\") " pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.272980 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djvg7\" (UniqueName: \"kubernetes.io/projected/851add34-7566-4ed5-b70a-c7935eb26e4f-kube-api-access-djvg7\") pod \"network-metrics-daemon-6thl9\" (UID: \"851add34-7566-4ed5-b70a-c7935eb26e4f\") " pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.284255 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:59Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.293608 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.293659 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.293675 4708 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.293697 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.293712 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:59Z","lastTransitionTime":"2026-02-03T07:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.299877 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:59Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.319649 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f1aa484aa9579cb96365d5a27132c7188c52d9b2d8e07a4a6367933f4305d5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:59Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.331713 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c85ceeba-ec54-4325-af45-7a9176cb62a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e632f23c70ccfc0d7a4bdc9f03ca71f0c29b7a1b446b9b1ee14ede18ee1682b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://74d6f26de36eecd83615f1e4f61bb111775073dff67ca10a94a65f436569de13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\"
:\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4f7fx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:59Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.374446 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/851add34-7566-4ed5-b70a-c7935eb26e4f-metrics-certs\") pod \"network-metrics-daemon-6thl9\" (UID: \"851add34-7566-4ed5-b70a-c7935eb26e4f\") " pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:10:59 crc kubenswrapper[4708]: E0203 07:10:59.374773 4708 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 03 07:10:59 crc kubenswrapper[4708]: E0203 07:10:59.374951 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/851add34-7566-4ed5-b70a-c7935eb26e4f-metrics-certs podName:851add34-7566-4ed5-b70a-c7935eb26e4f nodeName:}" failed. No retries permitted until 2026-02-03 07:10:59.874919607 +0000 UTC m=+38.856866614 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/851add34-7566-4ed5-b70a-c7935eb26e4f-metrics-certs") pod "network-metrics-daemon-6thl9" (UID: "851add34-7566-4ed5-b70a-c7935eb26e4f") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.374611 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djvg7\" (UniqueName: \"kubernetes.io/projected/851add34-7566-4ed5-b70a-c7935eb26e4f-kube-api-access-djvg7\") pod \"network-metrics-daemon-6thl9\" (UID: \"851add34-7566-4ed5-b70a-c7935eb26e4f\") " pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.394249 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djvg7\" (UniqueName: \"kubernetes.io/projected/851add34-7566-4ed5-b70a-c7935eb26e4f-kube-api-access-djvg7\") pod \"network-metrics-daemon-6thl9\" (UID: \"851add34-7566-4ed5-b70a-c7935eb26e4f\") " pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.395936 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.395962 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.395974 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.395993 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.396007 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:59Z","lastTransitionTime":"2026-02-03T07:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.498841 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.499199 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.499277 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.499353 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.499432 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:59Z","lastTransitionTime":"2026-02-03T07:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.602112 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.602149 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.602159 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.602173 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.602183 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:59Z","lastTransitionTime":"2026-02-03T07:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.678854 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:10:59 crc kubenswrapper[4708]: E0203 07:10:59.679223 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:11:15.67914985 +0000 UTC m=+54.661096707 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.679467 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.679592 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:10:59 crc kubenswrapper[4708]: E0203 07:10:59.679731 4708 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 03 07:10:59 crc kubenswrapper[4708]: E0203 07:10:59.679784 4708 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 03 07:10:59 crc kubenswrapper[4708]: E0203 07:10:59.679898 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-03 07:11:15.67987767 +0000 UTC m=+54.661824547 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 03 07:10:59 crc kubenswrapper[4708]: E0203 07:10:59.680003 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-03 07:11:15.679980903 +0000 UTC m=+54.661927780 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.705063 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.705188 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.705203 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.705219 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.705235 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:59Z","lastTransitionTime":"2026-02-03T07:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.765236 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.765288 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.765306 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.765328 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.765345 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:59Z","lastTransitionTime":"2026-02-03T07:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:59 crc kubenswrapper[4708]: E0203 07:10:59.779277 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:59Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.780159 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: 
\"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:10:59 crc kubenswrapper[4708]: E0203 07:10:59.780282 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 03 07:10:59 crc kubenswrapper[4708]: E0203 07:10:59.780310 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 03 07:10:59 crc kubenswrapper[4708]: E0203 07:10:59.780326 4708 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 07:10:59 crc kubenswrapper[4708]: E0203 07:10:59.780377 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-03 07:11:15.780360165 +0000 UTC m=+54.762306972 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 07:10:59 crc kubenswrapper[4708]: E0203 07:10:59.780511 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 03 07:10:59 crc kubenswrapper[4708]: E0203 07:10:59.780540 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 03 07:10:59 crc kubenswrapper[4708]: E0203 07:10:59.780555 4708 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 07:10:59 crc kubenswrapper[4708]: E0203 07:10:59.780625 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-03 07:11:15.780607482 +0000 UTC m=+54.762554299 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.780292 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.783336 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.783375 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.783393 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.783412 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.783427 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:59Z","lastTransitionTime":"2026-02-03T07:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:10:59 crc kubenswrapper[4708]: E0203 07:10:59.796102 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:59Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.799847 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.799896 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.799909 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.799928 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.799943 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:59Z","lastTransitionTime":"2026-02-03T07:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:59 crc kubenswrapper[4708]: E0203 07:10:59.812972 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:59Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.816954 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.816989 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.816998 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.817012 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.817023 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:59Z","lastTransitionTime":"2026-02-03T07:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:59 crc kubenswrapper[4708]: E0203 07:10:59.831763 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:59Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.835823 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.835865 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.835875 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.835889 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.835901 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:59Z","lastTransitionTime":"2026-02-03T07:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:59 crc kubenswrapper[4708]: E0203 07:10:59.855572 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:10:59Z is after 2025-08-24T17:21:41Z" Feb 03 07:10:59 crc kubenswrapper[4708]: E0203 07:10:59.855779 4708 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.857632 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.857673 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.857685 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.857705 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.857719 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:59Z","lastTransitionTime":"2026-02-03T07:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.882078 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/851add34-7566-4ed5-b70a-c7935eb26e4f-metrics-certs\") pod \"network-metrics-daemon-6thl9\" (UID: \"851add34-7566-4ed5-b70a-c7935eb26e4f\") " pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:10:59 crc kubenswrapper[4708]: E0203 07:10:59.882312 4708 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 03 07:10:59 crc kubenswrapper[4708]: E0203 07:10:59.882439 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/851add34-7566-4ed5-b70a-c7935eb26e4f-metrics-certs podName:851add34-7566-4ed5-b70a-c7935eb26e4f nodeName:}" failed. No retries permitted until 2026-02-03 07:11:00.882416034 +0000 UTC m=+39.864362901 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/851add34-7566-4ed5-b70a-c7935eb26e4f-metrics-certs") pod "network-metrics-daemon-6thl9" (UID: "851add34-7566-4ed5-b70a-c7935eb26e4f") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.960706 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.960752 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.960761 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.960776 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:10:59 crc kubenswrapper[4708]: I0203 07:10:59.960787 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:10:59Z","lastTransitionTime":"2026-02-03T07:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.063602 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.063655 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.063669 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.063724 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.063743 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:00Z","lastTransitionTime":"2026-02-03T07:11:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.084710 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 14:38:21.000224912 +0000 UTC Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.092612 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.092913 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:11:00 crc kubenswrapper[4708]: E0203 07:11:00.093038 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.093149 4708 scope.go:117] "RemoveContainer" containerID="79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802" Feb 03 07:11:00 crc kubenswrapper[4708]: E0203 07:11:00.093256 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.167541 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.167578 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.167588 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.167602 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.167612 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:00Z","lastTransitionTime":"2026-02-03T07:11:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.270019 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.270064 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.270080 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.270101 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.270119 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:00Z","lastTransitionTime":"2026-02-03T07:11:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.375488 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.375529 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.375538 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.375556 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.375569 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:00Z","lastTransitionTime":"2026-02-03T07:11:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.404622 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.406171 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"200b361a7b1d3ef0a5d05fba630cfe0727fbf9fb36199fe812935a6c43952335"} Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.407161 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.421464 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:00Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.433235 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:00Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.447138 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f1aa484aa9579cb96365d5a27132c7188c52d9b2d8e07a4a6367933f4305d5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:00Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.459490 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c85ceeba-ec54-4325-af45-7a9176cb62a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e632f23c70ccfc0d7a4bdc9f03ca71f0c29b7a1b446b9b1ee14ede18ee1682b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://74d6f26de36eecd83615f1e4f61bb111775073dff67ca10a94a65f436569de13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4f7fx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:00Z is after 2025-08-24T17:21:41Z" Feb 03 
07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.476489 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:00Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.479275 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.479327 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.479344 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.479370 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.479388 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:00Z","lastTransitionTime":"2026-02-03T07:11:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.487745 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:00Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.504123 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:00Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.523935 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5003d
d7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d996b8cfac56b8d3d4a7d95b6203642a3c7ebbe119dfbc1c7c05a7c589a5b412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d996b8cfac56b8d3d4a7d95b6203642a3c7ebbe119dfbc1c7c05a7c589a5b412\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:10:56Z\\\",\\\"message\\\":\\\"rnetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355339 6132 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0203 07:10:56.355440 6132 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0203 07:10:56.355524 6132 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355621 6132 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355814 6132 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.355834 6132 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.356014 6132 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.356594 6132 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-2sfqf_openshift-ovn-kubernetes(b0d14461-efec-4909-82de-2cce585892a4)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:00Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.536640 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:00Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.549847 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://200b361a7b1d3ef0a5d05fba630cfe0727fbf9fb36199fe812935a6c43952335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:00Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.561539 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:00Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.571091 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6thl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851add34-7566-4ed5-b70a-c7935eb26e4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6thl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:00Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.581525 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.581562 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.581574 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.581589 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.581602 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:00Z","lastTransitionTime":"2026-02-03T07:11:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.584027 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:00Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.595213 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:00Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.605441 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:00Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.614435 4708 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:00Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.684427 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.684464 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.684474 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.684490 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.684501 4708 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:00Z","lastTransitionTime":"2026-02-03T07:11:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.787901 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.787941 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.787951 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.787967 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.787980 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:00Z","lastTransitionTime":"2026-02-03T07:11:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.890960 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.891005 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.891019 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.891038 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.891053 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:00Z","lastTransitionTime":"2026-02-03T07:11:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.892020 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/851add34-7566-4ed5-b70a-c7935eb26e4f-metrics-certs\") pod \"network-metrics-daemon-6thl9\" (UID: \"851add34-7566-4ed5-b70a-c7935eb26e4f\") " pod="openshift-multus/network-metrics-daemon-6thl9"
Feb 03 07:11:00 crc kubenswrapper[4708]: E0203 07:11:00.892205 4708 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Feb 03 07:11:00 crc kubenswrapper[4708]: E0203 07:11:00.892273 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/851add34-7566-4ed5-b70a-c7935eb26e4f-metrics-certs podName:851add34-7566-4ed5-b70a-c7935eb26e4f nodeName:}" failed. No retries permitted until 2026-02-03 07:11:02.892256666 +0000 UTC m=+41.874203493 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/851add34-7566-4ed5-b70a-c7935eb26e4f-metrics-certs") pod "network-metrics-daemon-6thl9" (UID: "851add34-7566-4ed5-b70a-c7935eb26e4f") : object "openshift-multus"/"metrics-daemon-secret" not registered
Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.994935 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.995012 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.995035 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.995066 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:00 crc kubenswrapper[4708]: I0203 07:11:00.995087 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:00Z","lastTransitionTime":"2026-02-03T07:11:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.085652 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 00:41:32.670823112 +0000 UTC
Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.092201 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9"
Feb 03 07:11:01 crc kubenswrapper[4708]: E0203 07:11:01.092373 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f"
Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.092201 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
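The "No retries permitted until ... (durationBeforeRetry 2s)" line in the failed MountVolume operation above comes from kubelet's per-operation exponential backoff: each consecutive failure roughly doubles the wait before the volume manager may retry, up to a cap. The sketch below shows that policy under assumed initial-delay and cap values; the real constants live in kubelet's nestedpendingoperations/goroutinemap code and may differ.

```go
// Illustrative exponential-backoff sketch for a repeatedly failing
// MountVolume operation; the 2s value in the log is consistent with a
// third consecutive failure under a doubling policy.
package main

import (
	"fmt"
	"time"
)

type backoff struct {
	delay time.Duration // wait imposed after the next failure
	cap   time.Duration // upper bound on the delay
}

// next records a failure and returns how long retries are blocked,
// corresponding to the durationBeforeRetry value printed in the log.
func (b *backoff) next() time.Duration {
	d := b.delay
	b.delay *= 2
	if b.delay > b.cap {
		b.delay = b.cap
	}
	return d
}

func main() {
	// Assumed values for demonstration only.
	b := &backoff{delay: 500 * time.Millisecond, cap: 2 * time.Minute}
	for i := 1; i <= 4; i++ {
		fmt.Printf("failure %d: durationBeforeRetry %s\n", i, b.next())
	}
	// failure 3 prints 2s, matching the log entry above.
}
```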
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:11:01 crc kubenswrapper[4708]: E0203 07:11:01.092559 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.097839 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.097920 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.097946 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.097978 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.098003 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:01Z","lastTransitionTime":"2026-02-03T07:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.200316 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.200363 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.200374 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.200389 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.200400 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:01Z","lastTransitionTime":"2026-02-03T07:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.303120 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.303155 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.303162 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.303178 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.303187 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:01Z","lastTransitionTime":"2026-02-03T07:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.406114 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.406182 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.406198 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.406214 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.406224 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:01Z","lastTransitionTime":"2026-02-03T07:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.508518 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.508567 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.508575 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.508591 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.508601 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:01Z","lastTransitionTime":"2026-02-03T07:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.617390 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.617447 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.617462 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.617484 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.617500 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:01Z","lastTransitionTime":"2026-02-03T07:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.720965 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.721030 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.721049 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.721069 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.721087 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:01Z","lastTransitionTime":"2026-02-03T07:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.824698 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.824753 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.824765 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.824783 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.824823 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:01Z","lastTransitionTime":"2026-02-03T07:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.927850 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.927934 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.927959 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.927988 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:01 crc kubenswrapper[4708]: I0203 07:11:01.928009 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:01Z","lastTransitionTime":"2026-02-03T07:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.030192 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.030241 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.030251 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.030268 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.030279 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:02Z","lastTransitionTime":"2026-02-03T07:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.087307 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 05:04:07.779817899 +0000 UTC Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.092251 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.092260 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:11:02 crc kubenswrapper[4708]: E0203 07:11:02.092383 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:11:02 crc kubenswrapper[4708]: E0203 07:11:02.092476 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.106125 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b8
2799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:02Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.118577 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:02Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.132978 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.133008 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.133018 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.133032 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.133042 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:02Z","lastTransitionTime":"2026-02-03T07:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.136169 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:02Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.167951 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mou
ntPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\
\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d996b8cfac56b8d3d4a7d95b6203642a3c7ebbe119dfbc1c7c05a7c589a5b412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d996b8cfac56b8d3d4a7d95b6203642a3c7ebbe119dfbc1c7c05a7c589a5b412\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:10:56Z\\\",\\\"message\\\":\\\"rnetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355339 6132 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0203 07:10:56.355440 6132 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0203 07:10:56.355524 6132 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355621 6132 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355814 6132 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.355834 6132 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.356014 6132 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from 
k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.356594 6132 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-2sfqf_openshift-ovn-kubernetes(b0d14461-efec-4909-82de-2cce585892a4)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/se
rviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:02Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.185510 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://200b361a7b1d3ef0a5d05fba630cfe0727fbf9fb36199fe812935a6c43952335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:02Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.209267 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:02Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.222889 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6thl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851add34-7566-4ed5-b70a-c7935eb26e4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6thl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:02Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.235229 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.235266 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.235275 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.235289 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.235298 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:02Z","lastTransitionTime":"2026-02-03T07:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.236724 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:02Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.248050 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:02Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.263159 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:02Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.282691 4708 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:02Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.298230 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:02Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.315195 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:02Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.328731 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:02Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.338734 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.338776 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.338787 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.338827 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.338845 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:02Z","lastTransitionTime":"2026-02-03T07:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.344599 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f1aa484aa9579cb96365d5a27132c7188c52d9b2d8e07a4a6367933f4305d5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:02Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.362338 4708 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c85ceeba-ec54-4325-af45-7a9176cb62a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e632f23c70ccfc0d7a4bdc9f03ca71f0c29b7a1b446b9b1ee14ede18ee1682b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://74d6f26de36eecd83615f1e4f61bb111775073dff67ca10a94a65f436569de13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4f7fx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-02-03T07:11:02Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.441444 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.441504 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.441518 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.441533 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.441544 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:02Z","lastTransitionTime":"2026-02-03T07:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.544050 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.544098 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.544110 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.544132 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.544145 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:02Z","lastTransitionTime":"2026-02-03T07:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.648149 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.648198 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.648212 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.648233 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.648247 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:02Z","lastTransitionTime":"2026-02-03T07:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.751288 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.751340 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.751350 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.751369 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.751381 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:02Z","lastTransitionTime":"2026-02-03T07:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.854096 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.854162 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.854174 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.854189 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.854201 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:02Z","lastTransitionTime":"2026-02-03T07:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.911018 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/851add34-7566-4ed5-b70a-c7935eb26e4f-metrics-certs\") pod \"network-metrics-daemon-6thl9\" (UID: \"851add34-7566-4ed5-b70a-c7935eb26e4f\") " pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:11:02 crc kubenswrapper[4708]: E0203 07:11:02.911181 4708 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 03 07:11:02 crc kubenswrapper[4708]: E0203 07:11:02.911265 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/851add34-7566-4ed5-b70a-c7935eb26e4f-metrics-certs podName:851add34-7566-4ed5-b70a-c7935eb26e4f nodeName:}" failed. No retries permitted until 2026-02-03 07:11:06.911238917 +0000 UTC m=+45.893185734 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/851add34-7566-4ed5-b70a-c7935eb26e4f-metrics-certs") pod "network-metrics-daemon-6thl9" (UID: "851add34-7566-4ed5-b70a-c7935eb26e4f") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.956360 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.956418 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.956429 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.956444 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:02 crc kubenswrapper[4708]: I0203 07:11:02.956457 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:02Z","lastTransitionTime":"2026-02-03T07:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.059922 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.059999 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.060016 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.060040 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.060057 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:03Z","lastTransitionTime":"2026-02-03T07:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.088103 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 00:58:06.301670033 +0000 UTC Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.092654 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.092665 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:11:03 crc kubenswrapper[4708]: E0203 07:11:03.093036 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:11:03 crc kubenswrapper[4708]: E0203 07:11:03.093269 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.162630 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.162714 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.162730 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.162752 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.162773 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:03Z","lastTransitionTime":"2026-02-03T07:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.265843 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.265915 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.265931 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.265949 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.265966 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:03Z","lastTransitionTime":"2026-02-03T07:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.369206 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.369273 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.369288 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.369308 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.369321 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:03Z","lastTransitionTime":"2026-02-03T07:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.472015 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.472073 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.472085 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.472100 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.472111 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:03Z","lastTransitionTime":"2026-02-03T07:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.574339 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.574428 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.574442 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.574458 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.574471 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:03Z","lastTransitionTime":"2026-02-03T07:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.676972 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.677005 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.677013 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.677026 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.677035 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:03Z","lastTransitionTime":"2026-02-03T07:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.779560 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.779604 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.779614 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.779626 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.779635 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:03Z","lastTransitionTime":"2026-02-03T07:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.882172 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.882222 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.882234 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.882253 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.882265 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:03Z","lastTransitionTime":"2026-02-03T07:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.984628 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.984696 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.984719 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.984744 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:03 crc kubenswrapper[4708]: I0203 07:11:03.984761 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:03Z","lastTransitionTime":"2026-02-03T07:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.086728 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.086754 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.086762 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.086774 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.086784 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:04Z","lastTransitionTime":"2026-02-03T07:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.089079 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 19:12:39.835694136 +0000 UTC Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.093032 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:11:04 crc kubenswrapper[4708]: E0203 07:11:04.093209 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.094201 4708 util.go:30] "No sandbox for pod can be found. 
Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.094201 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 03 07:11:04 crc kubenswrapper[4708]: E0203 07:11:04.094597 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.189539 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.189599 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.189615 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.189637 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.189657 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:04Z","lastTransitionTime":"2026-02-03T07:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.291974 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.292043 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.292061 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.292084 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.292101 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:04Z","lastTransitionTime":"2026-02-03T07:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.394097 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.394189 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.394203 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.394219 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.394230 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:04Z","lastTransitionTime":"2026-02-03T07:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.496658 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.496708 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.496724 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.496746 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.496762 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:04Z","lastTransitionTime":"2026-02-03T07:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.599356 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.599420 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.599437 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.599462 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.599481 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:04Z","lastTransitionTime":"2026-02-03T07:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.701772 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.701858 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.701871 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.701885 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.701896 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:04Z","lastTransitionTime":"2026-02-03T07:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.804667 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.804713 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.804722 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.804738 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.804748 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:04Z","lastTransitionTime":"2026-02-03T07:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.907065 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.907114 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.907128 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.907148 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:04 crc kubenswrapper[4708]: I0203 07:11:04.907159 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:04Z","lastTransitionTime":"2026-02-03T07:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.009782 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.009855 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.009870 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.009891 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.009907 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:05Z","lastTransitionTime":"2026-02-03T07:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.089725 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 12:51:49.058464414 +0000 UTC Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.092154 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:11:05 crc kubenswrapper[4708]: E0203 07:11:05.092463 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.092633 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:11:05 crc kubenswrapper[4708]: E0203 07:11:05.093437 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.111865 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.111910 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.111921 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.111937 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.111948 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:05Z","lastTransitionTime":"2026-02-03T07:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.214357 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.214412 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.214426 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.214449 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.214462 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:05Z","lastTransitionTime":"2026-02-03T07:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.317076 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.317116 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.317128 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.317147 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.317163 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:05Z","lastTransitionTime":"2026-02-03T07:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.420011 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.420061 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.420074 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.420094 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.420107 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:05Z","lastTransitionTime":"2026-02-03T07:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.522826 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.522865 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.522874 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.522889 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.522903 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:05Z","lastTransitionTime":"2026-02-03T07:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.625510 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.625546 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.625556 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.625571 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.625581 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:05Z","lastTransitionTime":"2026-02-03T07:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.728244 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.728279 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.728288 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.728303 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.728313 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:05Z","lastTransitionTime":"2026-02-03T07:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.830436 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.830488 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.830498 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.830512 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.830522 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:05Z","lastTransitionTime":"2026-02-03T07:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.933226 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.933265 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.933281 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.933298 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:05 crc kubenswrapper[4708]: I0203 07:11:05.933309 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:05Z","lastTransitionTime":"2026-02-03T07:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.035628 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.035670 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.035680 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.035694 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.035704 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:06Z","lastTransitionTime":"2026-02-03T07:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.090094 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 03:22:22.900376339 +0000 UTC Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.092414 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.092492 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:11:06 crc kubenswrapper[4708]: E0203 07:11:06.092557 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:11:06 crc kubenswrapper[4708]: E0203 07:11:06.092628 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.138844 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.138906 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.138918 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.138934 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.138946 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:06Z","lastTransitionTime":"2026-02-03T07:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.241823 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.241888 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.241897 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.241911 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.241924 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:06Z","lastTransitionTime":"2026-02-03T07:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.345228 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.345316 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.345346 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.345379 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.345402 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:06Z","lastTransitionTime":"2026-02-03T07:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.447682 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.448065 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.448076 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.448093 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.448109 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:06Z","lastTransitionTime":"2026-02-03T07:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.550839 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.550896 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.550912 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.550935 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.550953 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:06Z","lastTransitionTime":"2026-02-03T07:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.654322 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.654363 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.654377 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.654404 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.654417 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:06Z","lastTransitionTime":"2026-02-03T07:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.757131 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.757196 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.757213 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.757236 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.757252 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:06Z","lastTransitionTime":"2026-02-03T07:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.860786 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.860861 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.860883 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.860914 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.860937 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:06Z","lastTransitionTime":"2026-02-03T07:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.950990 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/851add34-7566-4ed5-b70a-c7935eb26e4f-metrics-certs\") pod \"network-metrics-daemon-6thl9\" (UID: \"851add34-7566-4ed5-b70a-c7935eb26e4f\") " pod="openshift-multus/network-metrics-daemon-6thl9"
Feb 03 07:11:06 crc kubenswrapper[4708]: E0203 07:11:06.951224 4708 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Feb 03 07:11:06 crc kubenswrapper[4708]: E0203 07:11:06.951325 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/851add34-7566-4ed5-b70a-c7935eb26e4f-metrics-certs podName:851add34-7566-4ed5-b70a-c7935eb26e4f nodeName:}" failed. No retries permitted until 2026-02-03 07:11:14.951296887 +0000 UTC m=+53.933243734 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/851add34-7566-4ed5-b70a-c7935eb26e4f-metrics-certs") pod "network-metrics-daemon-6thl9" (UID: "851add34-7566-4ed5-b70a-c7935eb26e4f") : object "openshift-multus"/"metrics-daemon-secret" not registered
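
The nestedpendingoperations.go entry above gates the retry: after this MountVolume.SetUp failure, no new attempt is permitted until 07:11:14 (durationBeforeRetry 8s), and that delay grows across consecutive failures. A Go sketch of the gating under assumed constants; the 500ms initial delay, doubling factor, and 2m cap are illustrative, not kubelet's exact values.

package main

import (
	"fmt"
	"time"
)

// retryGate models "No retries permitted until <t> (durationBeforeRetry <d>)":
// each failure pushes the next allowed attempt out by an exponentially
// growing delay. All constants here are assumptions.
type retryGate struct {
	delay time.Duration // current durationBeforeRetry
	next  time.Time     // no retries permitted until this instant
}

func (g *retryGate) fail(now time.Time) {
	const capDelay = 2 * time.Minute
	if g.delay == 0 {
		g.delay = 500 * time.Millisecond // assumed initial backoff
	} else {
		g.delay *= 2 // assumed doubling
		if g.delay > capDelay {
			g.delay = capDelay
		}
	}
	g.next = now.Add(g.delay)
}

func (g *retryGate) allowed(now time.Time) bool { return !now.Before(g.next) }

func main() {
	var g retryGate
	now := time.Now()
	for i := 1; i <= 5; i++ {
		g.fail(now)
		fmt.Printf("failure %d: durationBeforeRetry %v, no retries until %v\n",
			i, g.delay, g.next.Format(time.RFC3339))
		now = g.next // assume the next attempt fires exactly when permitted
	}
	fmt.Println("retry allowed now?", g.allowed(now))
}
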
Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.963972 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.964036 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.964052 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.964115 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:06 crc kubenswrapper[4708]: I0203 07:11:06.964137 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:06Z","lastTransitionTime":"2026-02-03T07:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.067252 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.067295 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.067306 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.067320 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.067329 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:07Z","lastTransitionTime":"2026-02-03T07:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.090472 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 17:27:49.13633909 +0000 UTC
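
Note that each certificate_manager.go pass logs the same expiration (2026-02-24 05:53:03 UTC) but a different rotation deadline: the deadline is re-drawn at a random, jittered fraction of the certificate's validity window every time it is evaluated. A Go sketch of that scheme; the 70-90% jitter window matches how client-go's certificate manager is documented to behave but should be treated as an assumption, and the 30-day validity below is assumed only to make the example self-contained.

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline picks a fresh deadline at a jittered fraction of the
// certificate's validity window, which is why each pass above prints a
// different "rotation deadline is ..." for the same expiration.
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64())) // assumed 70-90% window
	return notBefore.Add(jittered)
}

func main() {
	notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC) // expiration from the log
	notBefore := notAfter.Add(-30 * 24 * time.Hour)           // assumed 30-day validity
	for i := 0; i < 3; i++ {
		fmt.Println("rotation deadline is", rotationDeadline(notBefore, notAfter).Format(time.RFC3339))
	}
}
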
pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.169572 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.169643 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.169664 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.169697 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.169719 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:07Z","lastTransitionTime":"2026-02-03T07:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.272233 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.272275 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.272291 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.272312 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.272330 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:07Z","lastTransitionTime":"2026-02-03T07:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.375260 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.375323 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.375346 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.375371 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.375388 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:07Z","lastTransitionTime":"2026-02-03T07:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.478676 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.478907 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.478949 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.478979 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.479001 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:07Z","lastTransitionTime":"2026-02-03T07:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.581662 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.581725 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.581744 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.581767 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.581784 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:07Z","lastTransitionTime":"2026-02-03T07:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.685015 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.685055 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.685067 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.685081 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.685090 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:07Z","lastTransitionTime":"2026-02-03T07:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.788403 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.788453 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.788482 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.788503 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.788516 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:07Z","lastTransitionTime":"2026-02-03T07:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.890748 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.890846 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.890857 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.890872 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.890881 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:07Z","lastTransitionTime":"2026-02-03T07:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.994285 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.994363 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.994374 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.994424 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:07 crc kubenswrapper[4708]: I0203 07:11:07.994435 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:07Z","lastTransitionTime":"2026-02-03T07:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.091647 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 06:24:33.750280163 +0000 UTC Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.091985 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.092062 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:11:08 crc kubenswrapper[4708]: E0203 07:11:08.092117 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:11:08 crc kubenswrapper[4708]: E0203 07:11:08.092238 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.096199 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.096242 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.096250 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.096266 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.096276 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:08Z","lastTransitionTime":"2026-02-03T07:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.198817 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.198857 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.198865 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.198879 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.198887 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:08Z","lastTransitionTime":"2026-02-03T07:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.302140 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.302214 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.302236 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.302267 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.302288 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:08Z","lastTransitionTime":"2026-02-03T07:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.405464 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.405525 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.405539 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.405561 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.405576 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:08Z","lastTransitionTime":"2026-02-03T07:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.508224 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.508279 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.508293 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.508314 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.508330 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:08Z","lastTransitionTime":"2026-02-03T07:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.611129 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.611225 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.611242 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.611264 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.611282 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:08Z","lastTransitionTime":"2026-02-03T07:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.713353 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.713397 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.713435 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.713453 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.713464 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:08Z","lastTransitionTime":"2026-02-03T07:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.816402 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.816458 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.816470 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.816487 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.816500 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:08Z","lastTransitionTime":"2026-02-03T07:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.918894 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.918989 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.919013 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.919045 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:08 crc kubenswrapper[4708]: I0203 07:11:08.919069 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:08Z","lastTransitionTime":"2026-02-03T07:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.022546 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.022590 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.022600 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.022616 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.022625 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:09Z","lastTransitionTime":"2026-02-03T07:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.092669 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 02:55:10.128305075 +0000 UTC Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.092744 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.092840 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:11:09 crc kubenswrapper[4708]: E0203 07:11:09.093063 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:11:09 crc kubenswrapper[4708]: E0203 07:11:09.093183 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.125366 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.125427 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.125445 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.125469 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.125488 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:09Z","lastTransitionTime":"2026-02-03T07:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.228239 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.228278 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.228289 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.228304 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.228315 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:09Z","lastTransitionTime":"2026-02-03T07:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.331417 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.331475 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.331486 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.331502 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.331513 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:09Z","lastTransitionTime":"2026-02-03T07:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.434057 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.434099 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.434109 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.434124 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.434136 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:09Z","lastTransitionTime":"2026-02-03T07:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.536691 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.536766 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.536852 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.536895 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.536913 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:09Z","lastTransitionTime":"2026-02-03T07:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.639253 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.639320 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.639338 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.639361 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.639379 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:09Z","lastTransitionTime":"2026-02-03T07:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.741699 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.741748 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.741759 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.741779 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.741819 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:09Z","lastTransitionTime":"2026-02-03T07:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.845483 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.845590 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.845607 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.845648 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.845660 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:09Z","lastTransitionTime":"2026-02-03T07:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.948365 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.948424 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.948444 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.948468 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:09 crc kubenswrapper[4708]: I0203 07:11:09.948487 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:09Z","lastTransitionTime":"2026-02-03T07:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.051111 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.051160 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.051170 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.051189 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.051200 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:10Z","lastTransitionTime":"2026-02-03T07:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.062104 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.071360 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.079756 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/opensh
ift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://200b361a7b1d3ef0a5d05fba630cfe0727fbf9fb36199fe812935a6c43952335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 
cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:10Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.092657 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:11:10 crc kubenswrapper[4708]: E0203 07:11:10.092783 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.092657 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:11:10 crc kubenswrapper[4708]: E0203 07:11:10.092881 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.092916 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 13:18:43.463806778 +0000 UTC Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.094241 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:10Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.106770 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6thl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851add34-7566-4ed5-b70a-c7935eb26e4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6thl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:10Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.121074 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:10Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.131462 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:10Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.142631 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:10Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.152249 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:10Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.154080 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.154132 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.154142 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.154158 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.154169 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:10Z","lastTransitionTime":"2026-02-03T07:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.166665 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:10Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.178163 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.178208 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.178222 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 
07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.178240 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.178255 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:10Z","lastTransitionTime":"2026-02-03T07:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.180360 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]
}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:10Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:10 crc kubenswrapper[4708]: E0203 07:11:10.189599 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:10Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.194444 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:10Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.197567 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.197617 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.197632 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.197652 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.197663 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:10Z","lastTransitionTime":"2026-02-03T07:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:10 crc kubenswrapper[4708]: E0203 07:11:10.209611 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:10Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.210355 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f1aa484aa9579cb96365d5a27132c7188c52d9b2d8e07a4a6367933f4305d5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:10Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.214694 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.214910 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:10 crc 
kubenswrapper[4708]: I0203 07:11:10.215002 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.215097 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.215197 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:10Z","lastTransitionTime":"2026-02-03T07:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.225464 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c85ceeba-ec54-4325-af45-7a9176cb62a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e632f23c70ccfc0d7a4bdc9f03ca71f0c29b7a1b446b9b1ee14ede18ee1682b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://74d6f26de36eecd83615f1e4f61bb111775073dff67ca10a94a65f436569de13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:1
0:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4f7fx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:10Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:10 crc kubenswrapper[4708]: E0203 07:11:10.226225 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:10Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.229736 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.229766 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.229778 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.229806 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.229819 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:10Z","lastTransitionTime":"2026-02-03T07:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.237433 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-c
erts\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:10Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:10 crc kubenswrapper[4708]: E0203 07:11:10.242734 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d
4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:10Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.246339 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.246381 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.246392 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.246411 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.246423 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:10Z","lastTransitionTime":"2026-02-03T07:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.249829 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal 
error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:10Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:10 crc kubenswrapper[4708]: E0203 07:11:10.257775 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:10Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:10 crc kubenswrapper[4708]: E0203 07:11:10.257943 4708 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.259725 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.259813 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.259829 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.259879 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.259897 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:10Z","lastTransitionTime":"2026-02-03T07:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.263313 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:10Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.280208 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d996b8cfac56b8d3d4a7d95b6203642a3c7ebbe1
19dfbc1c7c05a7c589a5b412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d996b8cfac56b8d3d4a7d95b6203642a3c7ebbe119dfbc1c7c05a7c589a5b412\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:10:56Z\\\",\\\"message\\\":\\\"rnetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355339 6132 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0203 07:10:56.355440 6132 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0203 07:10:56.355524 6132 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355621 6132 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355814 6132 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.355834 6132 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.356014 6132 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.356594 6132 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2sfqf_openshift-ovn-kubernetes(b0d14461-efec-4909-82de-2cce585892a4)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:10Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.362156 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.362213 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.362225 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.362240 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.362249 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:10Z","lastTransitionTime":"2026-02-03T07:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.464703 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.464772 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.464789 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.464848 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.464865 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:10Z","lastTransitionTime":"2026-02-03T07:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.568618 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.568676 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.568693 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.568719 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.568738 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:10Z","lastTransitionTime":"2026-02-03T07:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.671559 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.671633 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.671658 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.671688 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.671710 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:10Z","lastTransitionTime":"2026-02-03T07:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.775592 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.775662 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.775689 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.775714 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.775731 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:10Z","lastTransitionTime":"2026-02-03T07:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.878629 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.878682 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.878698 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.878718 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.878731 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:10Z","lastTransitionTime":"2026-02-03T07:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.982423 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.982498 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.982521 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.982553 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:10 crc kubenswrapper[4708]: I0203 07:11:10.982575 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:10Z","lastTransitionTime":"2026-02-03T07:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.086750 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.086828 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.086846 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.086869 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.086884 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:11Z","lastTransitionTime":"2026-02-03T07:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.092784 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.092876 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:11:11 crc kubenswrapper[4708]: E0203 07:11:11.092987 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:11:11 crc kubenswrapper[4708]: E0203 07:11:11.093078 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.093128 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 12:47:39.468760839 +0000 UTC Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.190249 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.190300 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.190314 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.190331 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.190344 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:11Z","lastTransitionTime":"2026-02-03T07:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.293360 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.293398 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.293407 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.293420 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.293428 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:11Z","lastTransitionTime":"2026-02-03T07:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.396278 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.396314 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.396326 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.396342 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.396354 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:11Z","lastTransitionTime":"2026-02-03T07:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.498566 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.498603 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.498611 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.498642 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.498651 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:11Z","lastTransitionTime":"2026-02-03T07:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.601018 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.601058 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.601069 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.601085 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.601096 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:11Z","lastTransitionTime":"2026-02-03T07:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.703915 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.704002 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.704029 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.704061 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.704093 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:11Z","lastTransitionTime":"2026-02-03T07:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.807171 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.807242 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.807264 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.807293 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.807317 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:11Z","lastTransitionTime":"2026-02-03T07:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.910277 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.910329 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.910345 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.910549 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:11 crc kubenswrapper[4708]: I0203 07:11:11.910568 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:11Z","lastTransitionTime":"2026-02-03T07:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.013633 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.013703 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.013727 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.013761 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.013784 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:12Z","lastTransitionTime":"2026-02-03T07:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.092345 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:11:12 crc kubenswrapper[4708]: E0203 07:11:12.092480 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.092843 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:11:12 crc kubenswrapper[4708]: E0203 07:11:12.093007 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.093261 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 03:38:18.376467959 +0000 UTC Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.106869 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c85ceeba-ec54-4325-af45-7a9176cb62a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e632f23c70ccfc0d7a4bdc9f03ca71f0c29b7a1b446b9b1ee14ede18ee1682b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://74d6f26de36eecd83615f1e4f61bb111775073dff67ca10a94a65f436569de13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.1
26.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4f7fx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:12Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.115662 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.115704 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.115718 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.115736 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.115748 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:12Z","lastTransitionTime":"2026-02-03T07:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.125010 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:12Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.146787 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:12Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.161893 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:12Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.179208 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f1aa484aa9579cb96365d5a27132c7188c52d9b2d8e07a4a6367933f4305d5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:12Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.201481 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d996b8cfac56b8d3d4a7d95b6203642a3c7ebbe119dfbc1c7c05a7c589a5b412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d996b8cfac56b8d3d4a7d95b6203642a3c7ebbe119dfbc1c7c05a7c589a5b412\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:10:56Z\\\",\\\"message\\\":\\\"rnetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355339 6132 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0203 07:10:56.355440 6132 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0203 07:10:56.355524 6132 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355621 6132 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355814 6132 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.355834 6132 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.356014 6132 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.356594 6132 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2sfqf_openshift-ovn-kubernetes(b0d14461-efec-4909-82de-2cce585892a4)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:12Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.216890 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:12Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.218080 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.218113 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.218126 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.218143 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.218154 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:12Z","lastTransitionTime":"2026-02-03T07:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.228527 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5c77e02-3759-4cf1-9952-ffeffc2bf7cf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e28c7820a6c7a758468366fd037570c94e2e7fa0e1b1383572572e9f5e9c90c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a72c95710bf91e6e945e1c87e81ca7bb1c45736af741186878d43e8d44cec3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de545ea0d1c7a9dc2abbabf5608bd9c24d017b16ac640fac8d033d5b2f9e3e13\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00bbcc3fd4dd42221c01f2c794aaac720d860f9214d6473ddca8ca96d5991442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00bbcc3fd4dd42221c01f2c794aaac720d860f9214d6473ddca8ca96d5991442\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:12Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.238586 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:12Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.252333 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:12Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.264061 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://200b361a7b1d3ef0a5d05fba630cfe0727fbf9fb36199fe812935a6c43952335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:12Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.277544 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:12Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.287397 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6thl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851add34-7566-4ed5-b70a-c7935eb26e4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6thl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:12Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.297949 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:12Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.309943 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:12Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.319566 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:12Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.320411 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.320450 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.320461 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.320478 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.320490 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:12Z","lastTransitionTime":"2026-02-03T07:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.331706 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:12Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.423173 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.423220 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.423230 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.423246 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.423254 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:12Z","lastTransitionTime":"2026-02-03T07:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.526959 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.527026 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.527043 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.527067 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.527084 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:12Z","lastTransitionTime":"2026-02-03T07:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.630019 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.630071 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.630082 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.630098 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.630110 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:12Z","lastTransitionTime":"2026-02-03T07:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.732907 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.732971 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.732994 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.733022 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.733047 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:12Z","lastTransitionTime":"2026-02-03T07:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.835962 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.836058 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.836081 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.836111 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.836133 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:12Z","lastTransitionTime":"2026-02-03T07:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.938729 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.938763 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.938773 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.938788 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:12 crc kubenswrapper[4708]: I0203 07:11:12.938825 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:12Z","lastTransitionTime":"2026-02-03T07:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.040927 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.040989 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.041005 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.041026 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.041043 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:13Z","lastTransitionTime":"2026-02-03T07:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.092412 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.092481 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:11:13 crc kubenswrapper[4708]: E0203 07:11:13.092552 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:11:13 crc kubenswrapper[4708]: E0203 07:11:13.092653 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.093583 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 19:58:15.819149019 +0000 UTC Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.143978 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.144038 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.144050 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.144069 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.144086 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:13Z","lastTransitionTime":"2026-02-03T07:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.245947 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.245986 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.245995 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.246009 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.246018 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:13Z","lastTransitionTime":"2026-02-03T07:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.349178 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.349535 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.349863 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.350043 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.350206 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:13Z","lastTransitionTime":"2026-02-03T07:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.453834 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.453883 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.453896 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.453918 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.453931 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:13Z","lastTransitionTime":"2026-02-03T07:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.556574 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.557019 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.557182 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.557334 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.557468 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:13Z","lastTransitionTime":"2026-02-03T07:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.660296 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.660371 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.660389 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.660416 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.660434 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:13Z","lastTransitionTime":"2026-02-03T07:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.763015 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.763053 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.763061 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.763075 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.763085 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:13Z","lastTransitionTime":"2026-02-03T07:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.865919 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.866245 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.866355 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.866465 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.866596 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:13Z","lastTransitionTime":"2026-02-03T07:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.969658 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.969712 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.969721 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.969741 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:13 crc kubenswrapper[4708]: I0203 07:11:13.969751 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:13Z","lastTransitionTime":"2026-02-03T07:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.072595 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.072645 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.072660 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.072677 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.072690 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:14Z","lastTransitionTime":"2026-02-03T07:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.092612 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.092645 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 03 07:11:14 crc kubenswrapper[4708]: E0203 07:11:14.093074 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 03 07:11:14 crc kubenswrapper[4708]: E0203 07:11:14.093151 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.093365 4708 scope.go:117] "RemoveContainer" containerID="d996b8cfac56b8d3d4a7d95b6203642a3c7ebbe119dfbc1c7c05a7c589a5b412"
Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.093731 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 01:39:08.195543218 +0000 UTC
Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.176023 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.176060 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.176071 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.176087 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.176097 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:14Z","lastTransitionTime":"2026-02-03T07:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.281572 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.281617 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.281634 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.281656 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.281672 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:14Z","lastTransitionTime":"2026-02-03T07:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.384749 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.384835 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.384849 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.384873 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.384891 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:14Z","lastTransitionTime":"2026-02-03T07:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.462380 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2sfqf_b0d14461-efec-4909-82de-2cce585892a4/ovnkube-controller/1.log" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.467963 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" event={"ID":"b0d14461-efec-4909-82de-2cce585892a4","Type":"ContainerStarted","Data":"5c478c9f7afdf37b64dbf2e28235ceeb1dd4ec262f8a09697f0433be0b2c5beb"} Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.468401 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.487429 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://200b361a7b1d3ef0a5d05fba630cfe0727fbf9fb36199fe812935a6c43952335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:14Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.488449 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.488530 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.488547 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.488566 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.488611 4708 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:14Z","lastTransitionTime":"2026-02-03T07:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.515103 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:14Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.530894 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6thl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851add34-7566-4ed5-b70a-c7935eb26e4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6thl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:14Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.553029 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:14Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.567115 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:14Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.581003 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:14Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.591612 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.591669 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.591680 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.591692 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.591701 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:14Z","lastTransitionTime":"2026-02-03T07:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.598291 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"
recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:14Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.622105 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:14Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.647319 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:14Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.662540 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:14Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.680855 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f1aa484aa9579cb96365d5a27132c7188c52d9b2d8e07a4a6367933f4305d5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:14Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.693712 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.693765 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:14 crc 
kubenswrapper[4708]: I0203 07:11:14.693776 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.693818 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.693834 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:14Z","lastTransitionTime":"2026-02-03T07:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.695718 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c85ceeba-ec54-4325-af45-7a9176cb62a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e632f23c70ccfc0d7a4bdc9f03ca71f0c29b7a1b446b9b1ee14ede18ee1682b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://74d6f26de36eecd83615f1e4f61bb111775073dff67ca10a94a65f436569de13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:1
0:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4f7fx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:14Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.708769 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:14Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.721964 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5c77e02-3759-4cf1-9952-ffeffc2bf7cf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e28c7820a6c7a758468366fd037570c94e2e7fa0e1b1383572572e9f5e9c90c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a72c95710bf91e6e945e1c87e81ca7bb1c45736af741186878d43e8d44cec3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de545ea0d1c7a9dc2abbabf5608bd9c24d017b16ac640fac8d033d5b2f9e3e13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00bbcc3fd4dd42221c01f2c794aaac720d860f9214d6473ddca8ca96d5991442\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00bbcc3fd4dd42221c01f2c794aaac720d860f9214d6473ddca8ca96d5991442\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:14Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.735495 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:14Z is after 2025-08-24T17:21:41Z" Feb 03 
07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.749442 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.
168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:14Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.772317 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\"
:\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/servi
ceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c478c9f7afdf37b64dbf2e28235ceeb1dd4ec262f8a09697f0433be0b2c5beb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d996b8cfac56b8d3d4a7d95b6203642a3c7ebbe119dfbc1c7c05a7c589a5b412\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:10:56Z\\\",\\\"message\\\":\\\"rnetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355339 6132 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0203 07:10:56.355440 6132 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0203 07:10:56.355524 6132 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355621 6132 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355814 6132 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.355834 6132 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.356014 6132 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 
07:10:56.356594 6132 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:11:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\
\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:14Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.796997 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.797070 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.797085 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.797104 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.797120 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:14Z","lastTransitionTime":"2026-02-03T07:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.899960 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.900000 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.900008 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.900024 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:14 crc kubenswrapper[4708]: I0203 07:11:14.900033 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:14Z","lastTransitionTime":"2026-02-03T07:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.003204 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.003267 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.003279 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.003299 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.003310 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:15Z","lastTransitionTime":"2026-02-03T07:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.041104 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/851add34-7566-4ed5-b70a-c7935eb26e4f-metrics-certs\") pod \"network-metrics-daemon-6thl9\" (UID: \"851add34-7566-4ed5-b70a-c7935eb26e4f\") " pod="openshift-multus/network-metrics-daemon-6thl9"
Feb 03 07:11:15 crc kubenswrapper[4708]: E0203 07:11:15.041752 4708 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Feb 03 07:11:15 crc kubenswrapper[4708]: E0203 07:11:15.041871 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/851add34-7566-4ed5-b70a-c7935eb26e4f-metrics-certs podName:851add34-7566-4ed5-b70a-c7935eb26e4f nodeName:}" failed. No retries permitted until 2026-02-03 07:11:31.041847378 +0000 UTC m=+70.023794185 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/851add34-7566-4ed5-b70a-c7935eb26e4f-metrics-certs") pod "network-metrics-daemon-6thl9" (UID: "851add34-7566-4ed5-b70a-c7935eb26e4f") : object "openshift-multus"/"metrics-daemon-secret" not registered
Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.092740 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9"
Feb 03 07:11:15 crc kubenswrapper[4708]: E0203 07:11:15.092898 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f"
Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.092755 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 03 07:11:15 crc kubenswrapper[4708]: E0203 07:11:15.093185 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.094748 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 11:35:36.404157025 +0000 UTC
Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.105459 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.105489 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.105497 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.105512 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.105524 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:15Z","lastTransitionTime":"2026-02-03T07:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.155313 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.170466 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\
":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.184838 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5c77e02-3759-4cf1-9952-ffeffc2bf7cf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e28c7820a6c7a758468366fd037570c94e2e7fa0e1b1383572572e9f5e9c90c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a72c95710bf91e6e945e1c87e81ca7bb1c45736af741186878d43e8d44cec3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de545ea0d1c7a9dc2abbabf5608bd9c24d017b16ac640fac8d033d5b2f9e3e13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00bbcc3fd4dd42221c01f2c794aaac720d860f9214d6473ddca8ca96d5991442\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00bbcc3fd4dd42221c01f2c794aaac720d860f9214d6473ddca8ca96d5991442\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.200426 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 
07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.208546 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.208618 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.208637 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.208668 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.208697 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:15Z","lastTransitionTime":"2026-02-03T07:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.225122 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/r
un/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.247536 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c478c9f7afdf37b64dbf2e28235ceeb1dd4ec26
2f8a09697f0433be0b2c5beb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d996b8cfac56b8d3d4a7d95b6203642a3c7ebbe119dfbc1c7c05a7c589a5b412\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:10:56Z\\\",\\\"message\\\":\\\"rnetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355339 6132 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0203 07:10:56.355440 6132 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0203 07:10:56.355524 6132 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355621 6132 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355814 6132 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.355834 6132 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.356014 6132 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.356594 6132 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:11:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.267557 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://200b361a7b1d3ef0a5d05fba630cfe0727fbf9fb36199fe812935a6c43952335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.283242 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.297041 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6thl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851add34-7566-4ed5-b70a-c7935eb26e4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6thl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.311143 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.311185 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.311196 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.311217 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.311228 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:15Z","lastTransitionTime":"2026-02-03T07:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.313142 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.324605 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.339653 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.354432 4708 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.370307 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.386688 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.402111 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.414327 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.414374 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.414385 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.414406 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.414420 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:15Z","lastTransitionTime":"2026-02-03T07:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.418557 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f1aa484aa9579cb96365d5a27132c7188c52d9b2d8e07a4a6367933f4305d5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.431579 4708 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c85ceeba-ec54-4325-af45-7a9176cb62a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e632f23c70ccfc0d7a4bdc9f03ca71f0c29b7a1b446b9b1ee14ede18ee1682b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://74d6f26de36eecd83615f1e4f61bb111775073dff67ca10a94a65f436569de13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4f7fx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.473485 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2sfqf_b0d14461-efec-4909-82de-2cce585892a4/ovnkube-controller/2.log" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.474584 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2sfqf_b0d14461-efec-4909-82de-2cce585892a4/ovnkube-controller/1.log" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.477640 4708 generic.go:334] "Generic (PLEG): container finished" podID="b0d14461-efec-4909-82de-2cce585892a4" containerID="5c478c9f7afdf37b64dbf2e28235ceeb1dd4ec262f8a09697f0433be0b2c5beb" exitCode=1 Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.477710 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" event={"ID":"b0d14461-efec-4909-82de-2cce585892a4","Type":"ContainerDied","Data":"5c478c9f7afdf37b64dbf2e28235ceeb1dd4ec262f8a09697f0433be0b2c5beb"} Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.477820 4708 scope.go:117] "RemoveContainer" containerID="d996b8cfac56b8d3d4a7d95b6203642a3c7ebbe119dfbc1c7c05a7c589a5b412" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.478680 4708 scope.go:117] "RemoveContainer" containerID="5c478c9f7afdf37b64dbf2e28235ceeb1dd4ec262f8a09697f0433be0b2c5beb" Feb 03 07:11:15 crc kubenswrapper[4708]: E0203 07:11:15.478992 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2sfqf_openshift-ovn-kubernetes(b0d14461-efec-4909-82de-2cce585892a4)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" podUID="b0d14461-efec-4909-82de-2cce585892a4" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.501215 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://200b361a7b1d3ef0a5d05fba630cfe0727fbf9fb36199fe812935a6c43952335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.516621 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.516671 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.516683 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.516712 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.516729 4708 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:15Z","lastTransitionTime":"2026-02-03T07:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.519571 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.532592 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6thl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851add34-7566-4ed5-b70a-c7935eb26e4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6thl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.553715 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.566360 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.581387 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.595125 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.612216 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.619853 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.619916 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.619973 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.620007 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.620032 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:15Z","lastTransitionTime":"2026-02-03T07:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.628915 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.649942 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.667167 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f1aa484aa9579cb96365d5a27132c7188c52d9b2d8e07a4a6367933f4305d5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.681503 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c85ceeba-ec54-4325-af45-7a9176cb62a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e632f23c70ccfc0d7a4bdc9f03ca71f0c29b7a1b446b9b1ee14ede18ee1682b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://74d6f26de36eecd83615f1e4f61bb111775073dff67ca10a94a65f436569de13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4f7fx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 
07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.695067 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.708357 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5c77e02-3759-4cf1-9952-ffeffc2bf7cf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e28c7820a6c7a758468366fd037570c94e2e7fa0e1b1383572572e9f5e9c90c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a72c95710bf91e6e945e1c87e81ca7bb1c45736af741186878d43e8d44cec3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de545ea0d1c7a9dc2abbabf5608bd9c24d017b16ac640fac8d033d5b2f9e3e13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00bbcc3fd4dd42221c01f2c794aaac720d860f9214d6473ddca8ca96d5991442\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00bbcc3fd4dd42221c01f2c794aaac720d860f9214d6473ddca8ca96d5991442\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.722610 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.722653 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.722666 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.722681 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.722689 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:15Z","lastTransitionTime":"2026-02-03T07:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.727327 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.743986 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.750462 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.750640 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.750706 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:11:15 crc kubenswrapper[4708]: E0203 07:11:15.750865 4708 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 03 07:11:15 crc kubenswrapper[4708]: E0203 07:11:15.750873 4708 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 03 07:11:15 crc kubenswrapper[4708]: E0203 07:11:15.750947 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-03 07:11:47.750924544 +0000 UTC m=+86.732871361 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 03 07:11:15 crc kubenswrapper[4708]: E0203 07:11:15.750973 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-03 07:11:47.750961705 +0000 UTC m=+86.732908532 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 03 07:11:15 crc kubenswrapper[4708]: E0203 07:11:15.751266 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:11:47.751246953 +0000 UTC m=+86.733193790 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.765225 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c478c9f7afdf37b64dbf2e28235ceeb1dd4ec26
2f8a09697f0433be0b2c5beb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d996b8cfac56b8d3d4a7d95b6203642a3c7ebbe119dfbc1c7c05a7c589a5b412\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:10:56Z\\\",\\\"message\\\":\\\"rnetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355339 6132 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0203 07:10:56.355440 6132 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0203 07:10:56.355524 6132 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355621 6132 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0203 07:10:56.355814 6132 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.355834 6132 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.356014 6132 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:10:56.356594 6132 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c478c9f7afdf37b64dbf2e28235ceeb1dd4ec262f8a09697f0433be0b2c5beb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:11:15.068829 6392 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0203 07:11:15.068895 6392 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0203 07:11:15.068902 6392 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0203 07:11:15.068917 6392 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0203 07:11:15.068922 6392 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0203 07:11:15.068916 6392 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0203 07:11:15.068946 6392 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0203 07:11:15.068954 6392 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0203 07:11:15.068962 6392 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0203 07:11:15.068973 6392 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0203 07:11:15.068984 6392 handler.go:208] Removed *v1.Node event handler 7\\\\nI0203 07:11:15.068993 6392 handler.go:208] Removed *v1.Node event handler 2\\\\nI0203 07:11:15.069003 6392 handler.go:208] Removed *v1.EgressIP event 
handler 8\\\\nI0203 07:11:15.069021 6392 factory.go:656] Stopping watch factory\\\\nI0203 07:11:15.069038 6392 ovnkube.go:599] Stopped ovnkube\\\\nI0203 07:11:1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:11:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f
639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:15Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.830369 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.830444 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.830461 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.830500 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.830517 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:15Z","lastTransitionTime":"2026-02-03T07:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.851396 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:11:15 crc kubenswrapper[4708]: E0203 07:11:15.851672 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 03 07:11:15 crc kubenswrapper[4708]: E0203 07:11:15.851714 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 03 07:11:15 crc kubenswrapper[4708]: E0203 07:11:15.851734 4708 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 07:11:15 crc kubenswrapper[4708]: E0203 07:11:15.851823 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-03 07:11:47.851775137 +0000 UTC m=+86.833721974 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 07:11:15 crc kubenswrapper[4708]: E0203 07:11:15.851997 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 03 07:11:15 crc kubenswrapper[4708]: E0203 07:11:15.852070 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 03 07:11:15 crc kubenswrapper[4708]: E0203 07:11:15.852087 4708 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 07:11:15 crc kubenswrapper[4708]: E0203 07:11:15.852168 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-03 07:11:47.852144507 +0000 UTC m=+86.834091414 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.852431 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.933597 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.933641 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.933652 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.933666 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:15 crc kubenswrapper[4708]: I0203 07:11:15.933677 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:15Z","lastTransitionTime":"2026-02-03T07:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.036251 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.036324 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.036346 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.036375 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.036397 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:16Z","lastTransitionTime":"2026-02-03T07:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.092036 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.092050 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:11:16 crc kubenswrapper[4708]: E0203 07:11:16.092234 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:11:16 crc kubenswrapper[4708]: E0203 07:11:16.092352 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.094913 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 22:48:52.720641483 +0000 UTC Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.139248 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.139309 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.139325 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.139343 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.139353 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:16Z","lastTransitionTime":"2026-02-03T07:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.242075 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.242109 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.242121 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.242137 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.242149 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:16Z","lastTransitionTime":"2026-02-03T07:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.344712 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.344762 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.344774 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.344806 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.344816 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:16Z","lastTransitionTime":"2026-02-03T07:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.448461 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.448501 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.448512 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.448528 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.448540 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:16Z","lastTransitionTime":"2026-02-03T07:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.485458 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2sfqf_b0d14461-efec-4909-82de-2cce585892a4/ovnkube-controller/2.log" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.489214 4708 scope.go:117] "RemoveContainer" containerID="5c478c9f7afdf37b64dbf2e28235ceeb1dd4ec262f8a09697f0433be0b2c5beb" Feb 03 07:11:16 crc kubenswrapper[4708]: E0203 07:11:16.489401 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2sfqf_openshift-ovn-kubernetes(b0d14461-efec-4909-82de-2cce585892a4)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" podUID="b0d14461-efec-4909-82de-2cce585892a4" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.505395 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":
\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:16Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.519022 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5c77e02-3759-4cf1-9952-ffeffc2bf7cf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e28c7820a6c7a758468366fd037570c94e2e7fa0e1b1383572572e9f5e9c90c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a72c95710bf91e6e945e1c87e81ca7bb1c45736af741186878d43e8d44cec3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de545ea0d1c7a9dc2abbabf5608bd9c24d017b16ac640fac8d033d5b2f9e3e13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00bbcc3fd4dd42221c01f2c794aaac720d860f9214d6473ddca8ca96d5991442\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00bbcc3fd4dd42221c01f2c794aaac720d860f9214d6473ddca8ca96d5991442\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:16Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.531614 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:16Z is after 2025-08-24T17:21:41Z" Feb 03 
07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.546252 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.
168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:16Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.551915 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.551959 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.551978 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.551999 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.552013 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:16Z","lastTransitionTime":"2026-02-03T07:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.582656 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c478c9f7afdf37b64dbf2e28235ceeb1dd4ec26
2f8a09697f0433be0b2c5beb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c478c9f7afdf37b64dbf2e28235ceeb1dd4ec262f8a09697f0433be0b2c5beb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:11:15.068829 6392 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0203 07:11:15.068895 6392 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0203 07:11:15.068902 6392 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0203 07:11:15.068917 6392 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0203 07:11:15.068922 6392 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0203 07:11:15.068916 6392 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0203 07:11:15.068946 6392 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0203 07:11:15.068954 6392 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0203 07:11:15.068962 6392 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0203 07:11:15.068973 6392 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0203 07:11:15.068984 6392 handler.go:208] Removed *v1.Node event handler 7\\\\nI0203 07:11:15.068993 6392 handler.go:208] Removed *v1.Node event handler 2\\\\nI0203 07:11:15.069003 6392 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0203 07:11:15.069021 6392 factory.go:656] Stopping watch factory\\\\nI0203 07:11:15.069038 6392 ovnkube.go:599] Stopped ovnkube\\\\nI0203 07:11:1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:11:14Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2sfqf_openshift-ovn-kubernetes(b0d14461-efec-4909-82de-2cce585892a4)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:16Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.605341 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://200b361a7b1d3ef0a5d05fba630cfe0727fbf9fb36199fe812935a6c43952335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default 
state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:16Z is after 2025-08-24T17:21:41Z" Feb 
03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.626187 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:16Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.644968 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6thl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851add34-7566-4ed5-b70a-c7935eb26e4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6thl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:16Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.654831 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.654898 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.654923 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.654953 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.654976 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:16Z","lastTransitionTime":"2026-02-03T07:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.664305 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:16Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.678438 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:16Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.692416 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:16Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.704029 4708 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:16Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.721157 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:16Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.737132 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:16Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.751211 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:16Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.757884 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.757929 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.757941 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.757956 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.757969 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:16Z","lastTransitionTime":"2026-02-03T07:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.770282 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f1aa484aa9579cb96365d5a27132c7188c52d9b2d8e07a4a6367933f4305d5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:16Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.783983 4708 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c85ceeba-ec54-4325-af45-7a9176cb62a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e632f23c70ccfc0d7a4bdc9f03ca71f0c29b7a1b446b9b1ee14ede18ee1682b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://74d6f26de36eecd83615f1e4f61bb111775073dff67ca10a94a65f436569de13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4f7fx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-02-03T07:11:16Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.860995 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.861047 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.861060 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.861079 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.861090 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:16Z","lastTransitionTime":"2026-02-03T07:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.963692 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.963723 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.963731 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.963745 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:16 crc kubenswrapper[4708]: I0203 07:11:16.963755 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:16Z","lastTransitionTime":"2026-02-03T07:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.066938 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.066972 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.066982 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.066998 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.067009 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:17Z","lastTransitionTime":"2026-02-03T07:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.091997 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:11:17 crc kubenswrapper[4708]: E0203 07:11:17.092167 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.092431 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:11:17 crc kubenswrapper[4708]: E0203 07:11:17.092565 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.095424 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 13:00:21.874143466 +0000 UTC Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.170593 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.170646 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.170663 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.170686 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.170702 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:17Z","lastTransitionTime":"2026-02-03T07:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.274029 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.274082 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.274094 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.274113 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.274126 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:17Z","lastTransitionTime":"2026-02-03T07:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.377760 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.377893 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.377913 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.377961 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.377980 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:17Z","lastTransitionTime":"2026-02-03T07:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.480275 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.480321 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.480341 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.480356 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.480366 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:17Z","lastTransitionTime":"2026-02-03T07:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.584106 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.584159 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.584176 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.584200 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.584218 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:17Z","lastTransitionTime":"2026-02-03T07:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.687740 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.687839 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.687854 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.687874 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.687886 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:17Z","lastTransitionTime":"2026-02-03T07:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.791000 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.791050 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.791059 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.791076 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.791087 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:17Z","lastTransitionTime":"2026-02-03T07:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.894724 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.894785 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.894845 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.894871 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.894888 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:17Z","lastTransitionTime":"2026-02-03T07:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.997282 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.997324 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.997336 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.997351 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:17 crc kubenswrapper[4708]: I0203 07:11:17.997361 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:17Z","lastTransitionTime":"2026-02-03T07:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.092616 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.092630 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:11:18 crc kubenswrapper[4708]: E0203 07:11:18.092845 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:11:18 crc kubenswrapper[4708]: E0203 07:11:18.092910 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.095492 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 17:02:40.034762997 +0000 UTC Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.099334 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.099368 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.099379 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.099393 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.099405 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:18Z","lastTransitionTime":"2026-02-03T07:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.202208 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.202244 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.202271 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.202292 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.202305 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:18Z","lastTransitionTime":"2026-02-03T07:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.304708 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.304756 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.304768 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.304783 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.304816 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:18Z","lastTransitionTime":"2026-02-03T07:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.407386 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.407473 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.407497 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.407529 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.407551 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:18Z","lastTransitionTime":"2026-02-03T07:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.509615 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.509669 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.509685 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.509707 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.509726 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:18Z","lastTransitionTime":"2026-02-03T07:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.612762 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.612830 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.612847 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.612863 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.612873 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:18Z","lastTransitionTime":"2026-02-03T07:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.715297 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.715387 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.715406 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.715432 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.715454 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:18Z","lastTransitionTime":"2026-02-03T07:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.818118 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.818158 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.818168 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.818183 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.818194 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:18Z","lastTransitionTime":"2026-02-03T07:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.921475 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.921556 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.921579 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.921610 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:18 crc kubenswrapper[4708]: I0203 07:11:18.921633 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:18Z","lastTransitionTime":"2026-02-03T07:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.024329 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.024398 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.024420 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.024450 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.024477 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:19Z","lastTransitionTime":"2026-02-03T07:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.092582 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.092582 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:11:19 crc kubenswrapper[4708]: E0203 07:11:19.092767 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:11:19 crc kubenswrapper[4708]: E0203 07:11:19.093007 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.095831 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 23:18:33.816414231 +0000 UTC Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.129285 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.129340 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.129357 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.129379 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.129400 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:19Z","lastTransitionTime":"2026-02-03T07:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.232241 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.232308 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.232319 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.232336 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.232345 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:19Z","lastTransitionTime":"2026-02-03T07:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.335365 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.335444 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.335479 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.335508 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.335527 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:19Z","lastTransitionTime":"2026-02-03T07:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.438732 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.438780 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.438840 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.438881 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.438903 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:19Z","lastTransitionTime":"2026-02-03T07:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.541388 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.541434 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.541446 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.541461 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.541472 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:19Z","lastTransitionTime":"2026-02-03T07:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.644176 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.644219 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.644233 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.644250 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.644263 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:19Z","lastTransitionTime":"2026-02-03T07:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.747009 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.747063 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.747075 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.747093 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.747107 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:19Z","lastTransitionTime":"2026-02-03T07:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.850091 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.850183 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.850197 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.850217 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.850230 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:19Z","lastTransitionTime":"2026-02-03T07:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.952958 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.953018 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.953028 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.953046 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:19 crc kubenswrapper[4708]: I0203 07:11:19.953056 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:19Z","lastTransitionTime":"2026-02-03T07:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.055700 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.055758 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.055770 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.055789 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.055824 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:20Z","lastTransitionTime":"2026-02-03T07:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.092183 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:11:20 crc kubenswrapper[4708]: E0203 07:11:20.092394 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.092557 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:11:20 crc kubenswrapper[4708]: E0203 07:11:20.092757 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.097931 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 00:42:12.986422023 +0000 UTC Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.158982 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.159049 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.159067 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.159089 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.159108 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:20Z","lastTransitionTime":"2026-02-03T07:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.262164 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.262215 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.262230 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.262251 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.262267 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:20Z","lastTransitionTime":"2026-02-03T07:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.365424 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.365501 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.365528 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.365559 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.365585 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:20Z","lastTransitionTime":"2026-02-03T07:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.381107 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.381149 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.381159 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.381175 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.381186 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:20Z","lastTransitionTime":"2026-02-03T07:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:20 crc kubenswrapper[4708]: E0203 07:11:20.401844 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:20Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.407305 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.407367 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.407389 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.407417 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.407434 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:20Z","lastTransitionTime":"2026-02-03T07:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:20 crc kubenswrapper[4708]: E0203 07:11:20.423195 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:20Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.428602 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.428658 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.428671 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.428694 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.428708 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:20Z","lastTransitionTime":"2026-02-03T07:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:20 crc kubenswrapper[4708]: E0203 07:11:20.442881 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:20Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.447706 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.447753 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.447764 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.447782 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.447828 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:20Z","lastTransitionTime":"2026-02-03T07:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:20 crc kubenswrapper[4708]: E0203 07:11:20.462457 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:20Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.468048 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.468141 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.468161 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.468188 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.468205 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:20Z","lastTransitionTime":"2026-02-03T07:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:20 crc kubenswrapper[4708]: E0203 07:11:20.486212 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:20Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:20 crc kubenswrapper[4708]: E0203 07:11:20.486327 4708 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.488531 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.488608 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.488628 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.488656 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.488672 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:20Z","lastTransitionTime":"2026-02-03T07:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.591148 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.591205 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.591221 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.591244 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.591260 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:20Z","lastTransitionTime":"2026-02-03T07:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.694550 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.694591 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.694636 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.694654 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.694665 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:20Z","lastTransitionTime":"2026-02-03T07:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.797774 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.797891 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.797911 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.797934 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.797949 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:20Z","lastTransitionTime":"2026-02-03T07:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.901839 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.901914 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.901937 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.901967 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:20 crc kubenswrapper[4708]: I0203 07:11:20.901984 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:20Z","lastTransitionTime":"2026-02-03T07:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.005454 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.005521 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.005539 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.005565 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.005586 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:21Z","lastTransitionTime":"2026-02-03T07:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.092563 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.092646 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:11:21 crc kubenswrapper[4708]: E0203 07:11:21.092759 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:11:21 crc kubenswrapper[4708]: E0203 07:11:21.092902 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.098690 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 19:55:40.215041837 +0000 UTC Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.108516 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.108566 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.108575 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.108593 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.108606 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:21Z","lastTransitionTime":"2026-02-03T07:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.213070 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.213135 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.213149 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.213172 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.213185 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:21Z","lastTransitionTime":"2026-02-03T07:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.316984 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.317047 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.317058 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.317084 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.317098 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:21Z","lastTransitionTime":"2026-02-03T07:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.420589 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.420640 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.420661 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.420684 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.420702 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:21Z","lastTransitionTime":"2026-02-03T07:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.524260 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.524304 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.524313 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.524331 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.524342 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:21Z","lastTransitionTime":"2026-02-03T07:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.627532 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.627573 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.627586 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.627604 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.627617 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:21Z","lastTransitionTime":"2026-02-03T07:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.731328 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.731396 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.731408 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.731429 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.731448 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:21Z","lastTransitionTime":"2026-02-03T07:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.836340 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.836422 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.836445 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.836479 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.836501 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:21Z","lastTransitionTime":"2026-02-03T07:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.942007 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.942438 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.942604 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.942747 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:21 crc kubenswrapper[4708]: I0203 07:11:21.943092 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:21Z","lastTransitionTime":"2026-02-03T07:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.046356 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.046411 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.046424 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.046442 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.046454 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:22Z","lastTransitionTime":"2026-02-03T07:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.091866 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.091936 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:11:22 crc kubenswrapper[4708]: E0203 07:11:22.092286 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:11:22 crc kubenswrapper[4708]: E0203 07:11:22.092334 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.099220 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 14:43:38.581312483 +0000 UTC Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.115716 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f1aa484aa9579cb96365d5a27132c7188c52d9b2d8e07a4a6367933f4305d5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:22Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.132694 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c85ceeba-ec54-4325-af45-7a9176cb62a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e632f23c70ccfc0d7a4bdc9f03ca71f0c29b7a1b446b9b1ee14ede18ee1682b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://74d6f26de36eecd83615f1e4f61bb111775073dff67ca10a94a65f436569de13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4f7fx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:22Z is after 2025-08-24T17:21:41Z" Feb 03 
07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.144756 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:22Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.148355 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.148446 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.148472 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.148528 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.148547 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:22Z","lastTransitionTime":"2026-02-03T07:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.159912 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:22Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.171041 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:22Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.182571 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:22Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.200722 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5003d
d7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c478c9f7afdf37b64dbf2e28235ceeb1dd4ec262f8a09697f0433be0b2c5beb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c478c9f7afdf37b64dbf2e28235ceeb1dd4ec262f8a09697f0433be0b2c5beb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:11:15.068829 6392 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0203 07:11:15.068895 6392 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0203 07:11:15.068902 6392 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0203 07:11:15.068917 6392 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0203 07:11:15.068922 6392 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0203 07:11:15.068916 6392 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0203 07:11:15.068946 6392 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0203 07:11:15.068954 6392 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0203 07:11:15.068962 6392 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0203 07:11:15.068973 6392 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0203 07:11:15.068984 6392 handler.go:208] Removed *v1.Node event handler 7\\\\nI0203 07:11:15.068993 6392 handler.go:208] Removed *v1.Node event handler 2\\\\nI0203 07:11:15.069003 6392 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0203 07:11:15.069021 6392 factory.go:656] Stopping watch factory\\\\nI0203 07:11:15.069038 6392 ovnkube.go:599] Stopped ovnkube\\\\nI0203 
07:11:1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:11:14Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2sfqf_openshift-ovn-kubernetes(b0d14461-efec-4909-82de-2cce585892a4)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recur
siveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:22Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.215197 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:22Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.227900 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5c77e02-3759-4cf1-9952-ffeffc2bf7cf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e28c7820a6c7a758468366fd037570c94e2e7fa0e1b1383572572e9f5e9c90c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a72c95710bf91e6e945e1c87e81ca7bb1c45736af741186878d43e8d44cec3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de545ea0d1c7a9dc2abbabf5608bd9c24d017b16ac640fac8d033d5b2f9e3e13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00bbcc3fd4dd42221c01f2c794aaac720d860f9214d6473ddca8ca96d5991442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00bbcc3fd4dd42221c01f2c794aaac720d860f9214d6473ddca8ca96d5991442\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:22Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.242764 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:22Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.251672 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.251729 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.251750 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.251772 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.251788 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:22Z","lastTransitionTime":"2026-02-03T07:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.255298 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6thl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851add34-7566-4ed5-b70a-c7935eb26e4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6thl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:22Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.269700 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://200b361a7b1d3ef0a5d05fba630cfe0727fbf9fb36199fe812935a6c43952335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:22Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.287135 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:22Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.299273 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:22Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.309299 4708 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:22Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.321393 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:22Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.333658 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:22Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.353760 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.353841 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.353861 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.353886 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.353902 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:22Z","lastTransitionTime":"2026-02-03T07:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.456558 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.456592 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.456603 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.456619 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.456630 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:22Z","lastTransitionTime":"2026-02-03T07:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.559697 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.560184 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.560302 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.560420 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.560535 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:22Z","lastTransitionTime":"2026-02-03T07:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.663172 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.663207 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.663216 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.663229 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.663240 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:22Z","lastTransitionTime":"2026-02-03T07:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.765517 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.765582 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.765621 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.765650 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.765743 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:22Z","lastTransitionTime":"2026-02-03T07:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.869587 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.869635 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.869648 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.869668 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.869680 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:22Z","lastTransitionTime":"2026-02-03T07:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.973302 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.973368 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.973380 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.973398 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:22 crc kubenswrapper[4708]: I0203 07:11:22.973409 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:22Z","lastTransitionTime":"2026-02-03T07:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.076351 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.076402 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.076412 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.076427 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.076437 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:23Z","lastTransitionTime":"2026-02-03T07:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.092781 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:11:23 crc kubenswrapper[4708]: E0203 07:11:23.093290 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.092839 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:11:23 crc kubenswrapper[4708]: E0203 07:11:23.093555 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.100998 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 02:13:35.585581891 +0000 UTC Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.179078 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.179444 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.179644 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.179895 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.180203 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:23Z","lastTransitionTime":"2026-02-03T07:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.283912 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.284007 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.284022 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.284041 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.284055 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:23Z","lastTransitionTime":"2026-02-03T07:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.387291 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.388101 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.388136 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.388160 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.388175 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:23Z","lastTransitionTime":"2026-02-03T07:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.492144 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.492189 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.492233 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.492253 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.492266 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:23Z","lastTransitionTime":"2026-02-03T07:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.595685 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.596258 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.596459 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.596640 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.596779 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:23Z","lastTransitionTime":"2026-02-03T07:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.699930 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.699998 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.700016 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.700044 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.700063 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:23Z","lastTransitionTime":"2026-02-03T07:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.804112 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.804330 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.804345 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.804644 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.804678 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:23Z","lastTransitionTime":"2026-02-03T07:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.908220 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.908281 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.908325 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.908347 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:23 crc kubenswrapper[4708]: I0203 07:11:23.908363 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:23Z","lastTransitionTime":"2026-02-03T07:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.011929 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.011962 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.011973 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.011989 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.011998 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:24Z","lastTransitionTime":"2026-02-03T07:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.092184 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:11:24 crc kubenswrapper[4708]: E0203 07:11:24.092439 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.092826 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:11:24 crc kubenswrapper[4708]: E0203 07:11:24.093119 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.101865 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 11:39:10.332621814 +0000 UTC Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.114207 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.114506 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.114590 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.114688 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.114856 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:24Z","lastTransitionTime":"2026-02-03T07:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.218735 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.219196 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.219400 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.219572 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.219770 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:24Z","lastTransitionTime":"2026-02-03T07:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.322150 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.322194 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.322206 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.322230 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.322248 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:24Z","lastTransitionTime":"2026-02-03T07:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.424764 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.425516 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.425606 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.425681 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.425777 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:24Z","lastTransitionTime":"2026-02-03T07:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.528316 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.528345 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.528354 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.528367 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.528376 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:24Z","lastTransitionTime":"2026-02-03T07:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.630770 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.630831 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.630850 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.630863 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.630887 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:24Z","lastTransitionTime":"2026-02-03T07:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.733723 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.733772 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.733786 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.733826 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.733841 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:24Z","lastTransitionTime":"2026-02-03T07:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.837663 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.838492 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.838703 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.838944 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.839103 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:24Z","lastTransitionTime":"2026-02-03T07:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.942308 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.942370 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.942384 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.942406 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:24 crc kubenswrapper[4708]: I0203 07:11:24.942420 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:24Z","lastTransitionTime":"2026-02-03T07:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.045609 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.045678 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.045689 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.045731 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.045745 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:25Z","lastTransitionTime":"2026-02-03T07:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.092383 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.092430 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:11:25 crc kubenswrapper[4708]: E0203 07:11:25.092970 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:11:25 crc kubenswrapper[4708]: E0203 07:11:25.093183 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.102449 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 06:13:09.835226569 +0000 UTC Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.148691 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.149030 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.149110 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.149189 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.149261 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:25Z","lastTransitionTime":"2026-02-03T07:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.251500 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.251590 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.251613 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.251644 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.251663 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:25Z","lastTransitionTime":"2026-02-03T07:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.353836 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.353941 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.353958 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.353977 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.353986 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:25Z","lastTransitionTime":"2026-02-03T07:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.458541 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.458590 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.458601 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.458617 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.458630 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:25Z","lastTransitionTime":"2026-02-03T07:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.561775 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.561835 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.561848 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.561864 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.561878 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:25Z","lastTransitionTime":"2026-02-03T07:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.664172 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.664216 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.664231 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.664252 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.664268 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:25Z","lastTransitionTime":"2026-02-03T07:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.767539 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.767906 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.768017 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.768142 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.768245 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:25Z","lastTransitionTime":"2026-02-03T07:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.871061 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.871103 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.871112 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.871128 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.871137 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:25Z","lastTransitionTime":"2026-02-03T07:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.974199 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.974243 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.974254 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.974268 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:25 crc kubenswrapper[4708]: I0203 07:11:25.974279 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:25Z","lastTransitionTime":"2026-02-03T07:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.076224 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.076266 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.076278 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.076294 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.076306 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:26Z","lastTransitionTime":"2026-02-03T07:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.092032 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:11:26 crc kubenswrapper[4708]: E0203 07:11:26.092347 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.092096 4708 util.go:30] "No sandbox for pod can be found. 
Feb 03 07:11:26 crc kubenswrapper[4708]: E0203 07:11:26.092592 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.103546 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 05:02:12.963199717 +0000 UTC
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.178972 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.179011 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.179021 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.179037 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.179047 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:26Z","lastTransitionTime":"2026-02-03T07:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.283293 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.283344 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.283354 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.283370 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.283382 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:26Z","lastTransitionTime":"2026-02-03T07:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.386288 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.386599 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.386689 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.386873 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.386978 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:26Z","lastTransitionTime":"2026-02-03T07:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.490395 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.490438 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.490455 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.490471 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.490487 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:26Z","lastTransitionTime":"2026-02-03T07:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.593420 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.593884 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.594169 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.594267 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.594373 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:26Z","lastTransitionTime":"2026-02-03T07:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.696863 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.697204 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.697506 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.697740 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.698024 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:26Z","lastTransitionTime":"2026-02-03T07:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.800520 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.800713 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.800919 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.801071 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.801163 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:26Z","lastTransitionTime":"2026-02-03T07:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.904146 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.904219 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.904230 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.904249 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:26 crc kubenswrapper[4708]: I0203 07:11:26.904261 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:26Z","lastTransitionTime":"2026-02-03T07:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.006319 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.006357 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.006368 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.006382 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.006393 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:27Z","lastTransitionTime":"2026-02-03T07:11:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.091782 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9"
Feb 03 07:11:27 crc kubenswrapper[4708]: E0203 07:11:27.092219 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.091870 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 03 07:11:27 crc kubenswrapper[4708]: E0203 07:11:27.092462 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.103898 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 08:53:32.001141536 +0000 UTC Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.108596 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.108639 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.108650 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.108665 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.108676 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:27Z","lastTransitionTime":"2026-02-03T07:11:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.210607 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.210640 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.210652 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.210666 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.210677 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:27Z","lastTransitionTime":"2026-02-03T07:11:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.313353 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.313396 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.313410 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.313425 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.313438 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:27Z","lastTransitionTime":"2026-02-03T07:11:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.416069 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.416100 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.416110 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.416124 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.416133 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:27Z","lastTransitionTime":"2026-02-03T07:11:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.518051 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.518092 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.518103 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.518118 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.518130 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:27Z","lastTransitionTime":"2026-02-03T07:11:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.620461 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.620505 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.620517 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.620532 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.620545 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:27Z","lastTransitionTime":"2026-02-03T07:11:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.723101 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.723138 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.723147 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.723161 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.723174 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:27Z","lastTransitionTime":"2026-02-03T07:11:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.826033 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.826074 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.826084 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.826102 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.826112 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:27Z","lastTransitionTime":"2026-02-03T07:11:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.928061 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.928124 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.928135 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.928154 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:27 crc kubenswrapper[4708]: I0203 07:11:27.928166 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:27Z","lastTransitionTime":"2026-02-03T07:11:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.030727 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.030768 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.030781 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.030823 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.030836 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:28Z","lastTransitionTime":"2026-02-03T07:11:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.093140 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 03 07:11:28 crc kubenswrapper[4708]: E0203 07:11:28.093281 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.093357 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 03 07:11:28 crc kubenswrapper[4708]: E0203 07:11:28.093527 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.104632 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 01:05:52.673757997 +0000 UTC
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.133737 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.133785 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.133817 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.133835 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.133848 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:28Z","lastTransitionTime":"2026-02-03T07:11:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.235689 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.235729 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.235741 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.235754 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.235762 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:28Z","lastTransitionTime":"2026-02-03T07:11:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
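Editor's note: the certificate_manager.go:356 lines report the same expiration (2026-02-24 05:53:03 UTC) but a different rotation deadline on every pass. That is expected: the deadline is recomputed with jitter each time it is evaluated. A hedged Go sketch of the idea; the 0.7 + 0.2*rand factor is an assumption based on client-go's documented jitter window, not something stated in this log.

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline picks a random instant in roughly the 70-90% span of
// the certificate's validity, which is why each logged deadline differs.
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	jitter := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
	return notBefore.Add(jitter)
}

func main() {
	notAfter, _ := time.Parse("2006-01-02 15:04:05", "2026-02-24 05:53:03")
	notBefore := notAfter.AddDate(-1, 0, 0) // assumed issuance date, for illustration only
	fmt.Println("rotation deadline:", rotationDeadline(notBefore, notAfter).UTC())
}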
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.338469 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.338526 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.338537 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.338557 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.338569 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:28Z","lastTransitionTime":"2026-02-03T07:11:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.441405 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.441452 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.441465 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.441482 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.441494 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:28Z","lastTransitionTime":"2026-02-03T07:11:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.543687 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.543719 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.543727 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.543739 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.543747 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:28Z","lastTransitionTime":"2026-02-03T07:11:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.645788 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.645882 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.645898 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.645923 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.645943 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:28Z","lastTransitionTime":"2026-02-03T07:11:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.748783 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.748849 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.748862 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.748877 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.748887 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:28Z","lastTransitionTime":"2026-02-03T07:11:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.851404 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.851438 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.851447 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.851460 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.851469 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:28Z","lastTransitionTime":"2026-02-03T07:11:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.954490 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.954528 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.954539 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.954558 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:28 crc kubenswrapper[4708]: I0203 07:11:28.954570 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:28Z","lastTransitionTime":"2026-02-03T07:11:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.057101 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.057132 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.057140 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.057153 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.057162 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:29Z","lastTransitionTime":"2026-02-03T07:11:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.092561 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.092561 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 03 07:11:29 crc kubenswrapper[4708]: E0203 07:11:29.092907 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f"
pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:11:29 crc kubenswrapper[4708]: E0203 07:11:29.093033 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.093182 4708 scope.go:117] "RemoveContainer" containerID="5c478c9f7afdf37b64dbf2e28235ceeb1dd4ec262f8a09697f0433be0b2c5beb" Feb 03 07:11:29 crc kubenswrapper[4708]: E0203 07:11:29.093440 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2sfqf_openshift-ovn-kubernetes(b0d14461-efec-4909-82de-2cce585892a4)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" podUID="b0d14461-efec-4909-82de-2cce585892a4" Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.105232 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-03 10:41:40.967771356 +0000 UTC Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.158690 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.158721 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.158732 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.158747 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.158757 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:29Z","lastTransitionTime":"2026-02-03T07:11:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.261025 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.261056 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.261064 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.261077 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.261087 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:29Z","lastTransitionTime":"2026-02-03T07:11:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.363303 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.363352 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.363363 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.363378 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.363388 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:29Z","lastTransitionTime":"2026-02-03T07:11:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.465838 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.465872 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.465881 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.465894 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.465903 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:29Z","lastTransitionTime":"2026-02-03T07:11:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.567961 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.568032 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.568045 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.568067 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.568082 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:29Z","lastTransitionTime":"2026-02-03T07:11:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.670500 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.670557 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.670571 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.670590 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.670601 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:29Z","lastTransitionTime":"2026-02-03T07:11:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.772473 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.772511 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.772521 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.772534 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.772543 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:29Z","lastTransitionTime":"2026-02-03T07:11:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.874522 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.874574 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.874586 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.874605 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.874617 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:29Z","lastTransitionTime":"2026-02-03T07:11:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.976608 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.976652 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.976662 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.976678 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:29 crc kubenswrapper[4708]: I0203 07:11:29.976690 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:29Z","lastTransitionTime":"2026-02-03T07:11:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.079302 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.079357 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.079375 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.079398 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.079417 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:30Z","lastTransitionTime":"2026-02-03T07:11:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.092723 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.092767 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 03 07:11:30 crc kubenswrapper[4708]: E0203 07:11:30.092989 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 03 07:11:30 crc kubenswrapper[4708]: E0203 07:11:30.093141 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.106191 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 04:05:01.902691307 +0000 UTC
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.181704 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.181740 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.181749 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.181761 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.181770 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:30Z","lastTransitionTime":"2026-02-03T07:11:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.284456 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.284498 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.284511 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.284528 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.284539 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:30Z","lastTransitionTime":"2026-02-03T07:11:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.386944 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.387016 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.387033 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.387057 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.387081 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:30Z","lastTransitionTime":"2026-02-03T07:11:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.490177 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.490219 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.490229 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.490245 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.490259 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:30Z","lastTransitionTime":"2026-02-03T07:11:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.523067 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.523101 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.523109 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.523123 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.523134 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:30Z","lastTransitionTime":"2026-02-03T07:11:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:30 crc kubenswrapper[4708]: E0203 07:11:30.542388 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:30Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.546297 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.546331 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.546341 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.546356 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.546366 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:30Z","lastTransitionTime":"2026-02-03T07:11:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:30 crc kubenswrapper[4708]: E0203 07:11:30.566551 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:30Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.570268 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.570292 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.570301 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.570315 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.570325 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:30Z","lastTransitionTime":"2026-02-03T07:11:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:30 crc kubenswrapper[4708]: E0203 07:11:30.582821 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:30Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.587141 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.587184 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.587196 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.587214 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.587226 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:30Z","lastTransitionTime":"2026-02-03T07:11:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:30 crc kubenswrapper[4708]: E0203 07:11:30.599928 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:30Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.607525 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.607565 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.607577 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.607595 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.607605 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:30Z","lastTransitionTime":"2026-02-03T07:11:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:30 crc kubenswrapper[4708]: E0203 07:11:30.621605 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:30Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:30 crc kubenswrapper[4708]: E0203 07:11:30.621749 4708 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.623302 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.623348 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.623359 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.623374 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.623383 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:30Z","lastTransitionTime":"2026-02-03T07:11:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.725474 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.725526 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.725538 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.725555 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.725565 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:30Z","lastTransitionTime":"2026-02-03T07:11:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.828139 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.828205 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.828223 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.828252 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.828268 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:30Z","lastTransitionTime":"2026-02-03T07:11:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.930993 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.931036 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.931045 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.931059 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:30 crc kubenswrapper[4708]: I0203 07:11:30.931071 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:30Z","lastTransitionTime":"2026-02-03T07:11:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.033344 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.033397 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.033409 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.033428 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.033440 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:31Z","lastTransitionTime":"2026-02-03T07:11:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.092201 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.092209 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 03 07:11:31 crc kubenswrapper[4708]: E0203 07:11:31.092363 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f"
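Every NodeNotReady transition and "Error syncing pod" entry in this stretch traces back to a single condition: the runtime network status reports NetworkReady=false because no CNI configuration file exists in /etc/kubernetes/cni/net.d/. A minimal sketch of such a readiness probe follows; it assumes, per the common CNI convention rather than kubelet's exact code path, that any *.conf, *.conflist, or *.json file in the directory counts as a network configuration.

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

// hasCNIConfig reports whether the given directory contains at least one
// file with an extension the CNI library conventionally accepts.
func hasCNIConfig(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch strings.ToLower(filepath.Ext(e.Name())) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
	if err != nil || !ok {
		// Mirrors the condition the kubelet keeps reporting above.
		fmt.Println("NetworkReady=false: no CNI configuration file; has your network provider started?")
		return
	}
	fmt.Println("NetworkReady=true")
}
```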
pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:11:31 crc kubenswrapper[4708]: E0203 07:11:31.092462 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.106993 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 14:35:07.675510456 +0000 UTC Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.127739 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/851add34-7566-4ed5-b70a-c7935eb26e4f-metrics-certs\") pod \"network-metrics-daemon-6thl9\" (UID: \"851add34-7566-4ed5-b70a-c7935eb26e4f\") " pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:11:31 crc kubenswrapper[4708]: E0203 07:11:31.127892 4708 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 03 07:11:31 crc kubenswrapper[4708]: E0203 07:11:31.127963 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/851add34-7566-4ed5-b70a-c7935eb26e4f-metrics-certs podName:851add34-7566-4ed5-b70a-c7935eb26e4f nodeName:}" failed. No retries permitted until 2026-02-03 07:12:03.127942914 +0000 UTC m=+102.109889721 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/851add34-7566-4ed5-b70a-c7935eb26e4f-metrics-certs") pod "network-metrics-daemon-6thl9" (UID: "851add34-7566-4ed5-b70a-c7935eb26e4f") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.135741 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.135787 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.135831 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.135850 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.135862 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:31Z","lastTransitionTime":"2026-02-03T07:11:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.238018 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.238063 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.238073 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.238089 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.238101 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:31Z","lastTransitionTime":"2026-02-03T07:11:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.340582 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.340611 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.340621 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.340636 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.340644 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:31Z","lastTransitionTime":"2026-02-03T07:11:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.443201 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.443237 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.443248 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.443264 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.443278 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:31Z","lastTransitionTime":"2026-02-03T07:11:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.545387 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.545417 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.545428 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.545442 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.545452 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:31Z","lastTransitionTime":"2026-02-03T07:11:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.647499 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.647532 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.647541 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.647553 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.647562 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:31Z","lastTransitionTime":"2026-02-03T07:11:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.749724 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.749776 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.749814 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.749837 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.749852 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:31Z","lastTransitionTime":"2026-02-03T07:11:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
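The certificate_manager entries at 07:11:31 and 07:11:32 report the same kubelet-serving expiration (2026-02-24 05:53:03 UTC) but two different rotation deadlines (2025-11-16 vs. 2025-11-30), which fits a deadline that is re-jittered each time it is computed. client-go's certificate manager picks a point at roughly 70-90% of the certificate's lifetime; the sketch below assumes that fraction, and the NotBefore value (one year before expiry) is a hypothetical fill-in, since the log only shows the expiry.

```go
package main

import (
	"fmt"
	"math/rand"
	"time"
)

// nextRotationDeadline picks a random instant between 70% and 90% of the
// certificate's validity period -- the jitter behavior consistent with the
// differing deadlines logged above (assumed fractions, see lead-in).
func nextRotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	// Expiry taken from the log; issuance time is a hypothetical assumption.
	notBefore := time.Date(2025, time.February, 24, 5, 53, 3, 0, time.UTC)
	notAfter := time.Date(2026, time.February, 24, 5, 53, 3, 0, time.UTC)
	for i := 0; i < 3; i++ {
		// Each computation lands on a different deadline within the window.
		fmt.Println("rotation deadline:", nextRotationDeadline(notBefore, notAfter))
	}
}
```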
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.852558 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.852615 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.852625 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.852641 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.852653 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:31Z","lastTransitionTime":"2026-02-03T07:11:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.955330 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.955386 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.955398 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.955415 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:31 crc kubenswrapper[4708]: I0203 07:11:31.955429 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:31Z","lastTransitionTime":"2026-02-03T07:11:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.057879 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.057923 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.057932 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.057947 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.057958 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:32Z","lastTransitionTime":"2026-02-03T07:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.091874 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.091886 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 03 07:11:32 crc kubenswrapper[4708]: E0203 07:11:32.091994 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 03 07:11:32 crc kubenswrapper[4708]: E0203 07:11:32.092164 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.107147 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 13:32:23.55978499 +0000 UTC
Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.107263 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.120344 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.136216 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f1aa484aa9579cb96365d5a27132c7188c52d9b2d8e07a4a6367933f4305d5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.149341 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c85ceeba-ec54-4325-af45-7a9176cb62a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e632f23c70ccfc0d7a4bdc9f03ca71f0c29b7a1b446b9b1ee14ede18ee1682b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://74d6f26de36eecd83615f1e4f61bb111775073dff67ca10a94a65f436569de13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4f7fx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 
07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.160121 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.160150 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.160162 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.160176 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.160186 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:32Z","lastTransitionTime":"2026-02-03T07:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.165068 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 
07:11:32.177122 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5c77e02-3759-4cf1-9952-ffeffc2bf7cf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e28c7820a6c7a758468366fd037570c94e2e7fa0e1b1383572572e9f5e9c90c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a72c95710bf91e6e945e1c87e81ca7bb1c45736af741186878d43e8d44cec3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de545ea0d1c7a9dc2abbabf5608bd9c24d017b16ac640fac8d033d5b2f9e3e13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00bbcc3fd4dd42221c01f2c794aaac720d860f9214d6473ddca8ca96d5991442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00bbcc3fd4dd42221c01f2c794aaac720d860f9214d6473ddca8ca96d5991442\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.190317 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.204094 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.223449 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c478c9f7afdf37b64dbf2e28235ceeb1dd4ec26
2f8a09697f0433be0b2c5beb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c478c9f7afdf37b64dbf2e28235ceeb1dd4ec262f8a09697f0433be0b2c5beb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:11:15.068829 6392 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0203 07:11:15.068895 6392 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0203 07:11:15.068902 6392 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0203 07:11:15.068917 6392 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0203 07:11:15.068922 6392 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0203 07:11:15.068916 6392 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0203 07:11:15.068946 6392 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0203 07:11:15.068954 6392 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0203 07:11:15.068962 6392 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0203 07:11:15.068973 6392 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0203 07:11:15.068984 6392 handler.go:208] Removed *v1.Node event handler 7\\\\nI0203 07:11:15.068993 6392 handler.go:208] Removed *v1.Node event handler 2\\\\nI0203 07:11:15.069003 6392 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0203 07:11:15.069021 6392 factory.go:656] Stopping watch factory\\\\nI0203 07:11:15.069038 6392 ovnkube.go:599] Stopped ovnkube\\\\nI0203 07:11:1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:11:14Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2sfqf_openshift-ovn-kubernetes(b0d14461-efec-4909-82de-2cce585892a4)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.236490 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.260054 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://200b361a7b1d3ef0a5d05fba630cfe0727fbf9fb36199fe812935a6c43952335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.262738 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.262772 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.262785 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.262816 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.262827 4708 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:32Z","lastTransitionTime":"2026-02-03T07:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.281897 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.299428 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6thl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851add34-7566-4ed5-b70a-c7935eb26e4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6thl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.314725 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.325058 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.336373 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.344550 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.365558 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.365606 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.365619 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.365641 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.365655 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:32Z","lastTransitionTime":"2026-02-03T07:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.467972 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.468031 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.468042 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.468059 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.468069 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:32Z","lastTransitionTime":"2026-02-03T07:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.543714 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-f2fzr_7cedfe91-d1c3-4c56-9aac-797ecade9468/kube-multus/0.log" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.543761 4708 generic.go:334] "Generic (PLEG): container finished" podID="7cedfe91-d1c3-4c56-9aac-797ecade9468" containerID="5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd" exitCode=1 Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.543816 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-f2fzr" event={"ID":"7cedfe91-d1c3-4c56-9aac-797ecade9468","Type":"ContainerDied","Data":"5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd"} Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.544324 4708 scope.go:117] "RemoveContainer" containerID="5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.561106 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":
{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://200b361a7b1d3ef0a5d05fba630cfe0727fbf9fb36199fe812935a6c43952335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" 
certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.570227 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.570431 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.570530 4708 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.570633 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.570746 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:32Z","lastTransitionTime":"2026-02-03T07:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.574842 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.585001 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6thl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851add34-7566-4ed5-b70a-c7935eb26e4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6thl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.595737 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.609560 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.618955 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.631461 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.645061 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c85ceeba-ec54-4325-af45-7a9176cb62a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e632f23c70ccfc0d7a4bdc9f03ca71f0c29b7a1b446b9b1ee14ede18ee1682b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://74d6f26de36eecd83615f1e4f61bb111775073dff67ca10a94a65f436569de13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":
[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4f7fx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.660684 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.672958 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 
07:11:32.673239 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.673323 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.673391 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.673446 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:32Z","lastTransitionTime":"2026-02-03T07:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.673942 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"en
v-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.686429 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.702588 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f1aa484aa9579cb96365d5a27132c7188c52d9b2d8e07a4a6367933f4305d5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.725349 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c478c9f7afdf37b64dbf2e28235ceeb1dd4ec26
2f8a09697f0433be0b2c5beb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c478c9f7afdf37b64dbf2e28235ceeb1dd4ec262f8a09697f0433be0b2c5beb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:11:15.068829 6392 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0203 07:11:15.068895 6392 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0203 07:11:15.068902 6392 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0203 07:11:15.068917 6392 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0203 07:11:15.068922 6392 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0203 07:11:15.068916 6392 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0203 07:11:15.068946 6392 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0203 07:11:15.068954 6392 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0203 07:11:15.068962 6392 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0203 07:11:15.068973 6392 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0203 07:11:15.068984 6392 handler.go:208] Removed *v1.Node event handler 7\\\\nI0203 07:11:15.068993 6392 handler.go:208] Removed *v1.Node event handler 2\\\\nI0203 07:11:15.069003 6392 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0203 07:11:15.069021 6392 factory.go:656] Stopping watch factory\\\\nI0203 07:11:15.069038 6392 ovnkube.go:599] Stopped ovnkube\\\\nI0203 07:11:1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:11:14Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2sfqf_openshift-ovn-kubernetes(b0d14461-efec-4909-82de-2cce585892a4)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.739430 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.753260 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5c77e02-3759-4cf1-9952-ffeffc2bf7cf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e28c7820a6c7a758468366fd037570c94e2e7fa0e1b1383572572e9f5e9c90c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a72c95710bf91e6e945e1c87e81ca7bb1c45736af741186878d43e8d44cec3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de545ea0d1c7a9dc2abbabf5608bd9c24d017b16ac640fac8d033d5b2f9e3e13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00bbcc3fd4dd42221c01f2c794aaac720d860f9214d6473ddca8ca96d5991442\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00bbcc3fd4dd42221c01f2c794aaac720d860f9214d6473ddca8ca96d5991442\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.765330 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 
07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.775590 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.775619 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.775628 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.775641 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.775650 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:32Z","lastTransitionTime":"2026-02-03T07:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.777898 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:11:31Z\\\",\\\"message\\\":\\\"2026-02-03T07:10:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_a9d1b301-d7e3-4c96-b77a-0ab1103474ea\\\\n2026-02-03T07:10:46+00:00 [cnibincopy] Successfully moved files in 
/host/opt/cni/bin/upgrade_a9d1b301-d7e3-4c96-b77a-0ab1103474ea to /host/opt/cni/bin/\\\\n2026-02-03T07:10:46Z [verbose] multus-daemon started\\\\n2026-02-03T07:10:46Z [verbose] Readiness Indicator file check\\\\n2026-02-03T07:11:31Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:32Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.878677 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.878725 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.878738 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.878755 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.878767 4708 setters.go:603] 
"Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:32Z","lastTransitionTime":"2026-02-03T07:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.981361 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.981414 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.981426 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.981444 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:32 crc kubenswrapper[4708]: I0203 07:11:32.981457 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:32Z","lastTransitionTime":"2026-02-03T07:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.085565 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.085964 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.085990 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.086016 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.086032 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:33Z","lastTransitionTime":"2026-02-03T07:11:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.092896 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.092904 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:11:33 crc kubenswrapper[4708]: E0203 07:11:33.093280 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:11:33 crc kubenswrapper[4708]: E0203 07:11:33.093302 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.107859 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 01:36:51.180404035 +0000 UTC Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.189268 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.189310 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.189319 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.189337 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.189348 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:33Z","lastTransitionTime":"2026-02-03T07:11:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.291473 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.291521 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.291534 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.291556 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.291567 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:33Z","lastTransitionTime":"2026-02-03T07:11:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.394524 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.394554 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.394563 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.394576 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.394584 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:33Z","lastTransitionTime":"2026-02-03T07:11:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.496397 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.496442 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.496453 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.496468 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.496477 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:33Z","lastTransitionTime":"2026-02-03T07:11:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.549510 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-f2fzr_7cedfe91-d1c3-4c56-9aac-797ecade9468/kube-multus/0.log" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.549589 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-f2fzr" event={"ID":"7cedfe91-d1c3-4c56-9aac-797ecade9468","Type":"ContainerStarted","Data":"0d4f0bd78f46aff839e5e3f84aab51a1734c1968d5d9f306b6175d0c4e21770e"} Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.567056 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:33Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.581010 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f1aa484aa9579cb96365d5a27132c7188c52d9b2d8e07a4a6367933f4305d5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:33Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.594738 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c85ceeba-ec54-4325-af45-7a9176cb62a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e632f23c70ccfc0d7a4bdc9f03ca71f0c29b7a1b446b9b1ee14ede18ee1682b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://74d6f26de36eecd83615f1e4f61bb111775073dff67ca10a94a65f436569de13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\"
:\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4f7fx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:33Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.598828 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.598865 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.598878 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.598894 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.598905 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:33Z","lastTransitionTime":"2026-02-03T07:11:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.606888 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:33Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.619146 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:33Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.632326 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:33Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.644080 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d4f0bd78f46aff839e5e3f84aab51a1734c1968d5d9f306b6175d0c4e21770e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:11:31Z\\\",\\\"message\\\":\\\"2026-02-03T07:10:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_a9d1b301-d7e3-4c96-b77a-0ab1103474ea\\\\n2026-02-03T07:10:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_a9d1b301-d7e3-4c96-b77a-0ab1103474ea to /host/opt/cni/bin/\\\\n2026-02-03T07:10:46Z [verbose] multus-daemon started\\\\n2026-02-03T07:10:46Z [verbose] Readiness Indicator file check\\\\n2026-02-03T07:11:31Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:11:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:33Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.660941 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c478c9f7afdf37b64dbf2e28235ceeb1dd4ec262f8a09697f0433be0b2c5beb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c478c9f7afdf37b64dbf2e28235ceeb1dd4ec262f8a09697f0433be0b2c5beb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:11:15.068829 6392 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0203 07:11:15.068895 6392 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0203 07:11:15.068902 6392 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0203 07:11:15.068917 6392 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0203 07:11:15.068922 6392 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0203 07:11:15.068916 6392 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0203 07:11:15.068946 6392 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0203 07:11:15.068954 6392 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0203 07:11:15.068962 6392 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0203 07:11:15.068973 6392 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0203 07:11:15.068984 6392 handler.go:208] Removed *v1.Node event handler 7\\\\nI0203 07:11:15.068993 6392 handler.go:208] Removed *v1.Node event handler 2\\\\nI0203 07:11:15.069003 6392 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0203 07:11:15.069021 6392 factory.go:656] Stopping watch factory\\\\nI0203 07:11:15.069038 6392 ovnkube.go:599] Stopped ovnkube\\\\nI0203 07:11:1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:11:14Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2sfqf_openshift-ovn-kubernetes(b0d14461-efec-4909-82de-2cce585892a4)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:33Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.672327 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:33Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.682976 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5c77e02-3759-4cf1-9952-ffeffc2bf7cf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e28c7820a6c7a758468366fd037570c94e2e7fa0e1b1383572572e9f5e9c90c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a72c95710bf91e6e945e1c87e81ca7bb1c45736af741186878d43e8d44cec3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de545ea0d1c7a9dc2abbabf5608bd9c24d017b16ac640fac8d033d5b2f9e3e13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00bbcc3fd4dd42221c01f2c794aaac720d860f9214d6473ddca8ca96d5991442\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00bbcc3fd4dd42221c01f2c794aaac720d860f9214d6473ddca8ca96d5991442\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:33Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.695035 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:33Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.702880 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.702939 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.702952 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.702970 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.702982 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:33Z","lastTransitionTime":"2026-02-03T07:11:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.704391 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6thl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851add34-7566-4ed5-b70a-c7935eb26e4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6thl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:33Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.716312 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://200b361a7b1d3ef0a5d05fba630cfe0727fbf9fb36199fe812935a6c43952335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:33Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.724866 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:33Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.734422 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:33Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.743053 4708 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:33Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.755195 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:33Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.806356 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.806433 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.806444 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.806511 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.806522 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:33Z","lastTransitionTime":"2026-02-03T07:11:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.909414 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.909462 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.909477 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.909505 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:33 crc kubenswrapper[4708]: I0203 07:11:33.909521 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:33Z","lastTransitionTime":"2026-02-03T07:11:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.012485 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.012527 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.012539 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.012554 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.012567 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:34Z","lastTransitionTime":"2026-02-03T07:11:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.093096 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.093192 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:11:34 crc kubenswrapper[4708]: E0203 07:11:34.093316 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:11:34 crc kubenswrapper[4708]: E0203 07:11:34.093511 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.108932 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 05:01:33.411318572 +0000 UTC Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.114614 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.114680 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.114712 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.114742 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.114762 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:34Z","lastTransitionTime":"2026-02-03T07:11:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.216919 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.217554 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.217584 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.217608 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.217622 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:34Z","lastTransitionTime":"2026-02-03T07:11:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.319957 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.320004 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.320015 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.320035 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.320047 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:34Z","lastTransitionTime":"2026-02-03T07:11:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.423116 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.423160 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.423171 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.423187 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.423199 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:34Z","lastTransitionTime":"2026-02-03T07:11:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.526211 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.526273 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.526285 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.526304 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.526318 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:34Z","lastTransitionTime":"2026-02-03T07:11:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.629293 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.629332 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.629341 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.629356 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.629367 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:34Z","lastTransitionTime":"2026-02-03T07:11:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.731216 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.731257 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.731270 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.731286 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.731298 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:34Z","lastTransitionTime":"2026-02-03T07:11:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.833882 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.833942 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.833958 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.833981 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.833997 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:34Z","lastTransitionTime":"2026-02-03T07:11:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.936702 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.936778 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.936844 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.936870 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:34 crc kubenswrapper[4708]: I0203 07:11:34.936912 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:34Z","lastTransitionTime":"2026-02-03T07:11:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.039263 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.039316 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.039330 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.039366 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.039390 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:35Z","lastTransitionTime":"2026-02-03T07:11:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.091939 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.092046 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:11:35 crc kubenswrapper[4708]: E0203 07:11:35.092149 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:11:35 crc kubenswrapper[4708]: E0203 07:11:35.092297 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.109925 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 07:24:26.38684454 +0000 UTC Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.141915 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.141966 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.141976 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.141992 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.142004 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:35Z","lastTransitionTime":"2026-02-03T07:11:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.244636 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.244690 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.244707 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.244732 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.244748 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:35Z","lastTransitionTime":"2026-02-03T07:11:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.347226 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.347285 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.347296 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.347311 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.347321 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:35Z","lastTransitionTime":"2026-02-03T07:11:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.454877 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.454918 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.454928 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.454953 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.454963 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:35Z","lastTransitionTime":"2026-02-03T07:11:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.556417 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.556462 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.556474 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.556488 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.556499 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:35Z","lastTransitionTime":"2026-02-03T07:11:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.659910 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.659947 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.659955 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.659970 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.659981 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:35Z","lastTransitionTime":"2026-02-03T07:11:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.762321 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.762350 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.762359 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.762371 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.762379 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:35Z","lastTransitionTime":"2026-02-03T07:11:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.865308 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.865346 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.865355 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.865372 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.865383 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:35Z","lastTransitionTime":"2026-02-03T07:11:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.967519 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.967620 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.967641 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.967667 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:35 crc kubenswrapper[4708]: I0203 07:11:35.967685 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:35Z","lastTransitionTime":"2026-02-03T07:11:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.070602 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.070641 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.070670 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.070707 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.070720 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:36Z","lastTransitionTime":"2026-02-03T07:11:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.091943 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:11:36 crc kubenswrapper[4708]: E0203 07:11:36.092081 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.092114 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:11:36 crc kubenswrapper[4708]: E0203 07:11:36.092229 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.110053 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 00:28:19.178460902 +0000 UTC Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.173267 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.173310 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.173322 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.173340 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.173351 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:36Z","lastTransitionTime":"2026-02-03T07:11:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.275768 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.275888 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.275908 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.275934 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.275952 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:36Z","lastTransitionTime":"2026-02-03T07:11:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.379268 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.379316 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.379332 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.379353 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.379371 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:36Z","lastTransitionTime":"2026-02-03T07:11:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.482833 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.482877 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.482895 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.482941 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.482953 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:36Z","lastTransitionTime":"2026-02-03T07:11:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.585919 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.585988 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.586011 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.586041 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.586063 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:36Z","lastTransitionTime":"2026-02-03T07:11:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.688258 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.688301 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.688317 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.688332 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.688342 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:36Z","lastTransitionTime":"2026-02-03T07:11:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.791365 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.791406 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.791415 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.791433 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.791443 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:36Z","lastTransitionTime":"2026-02-03T07:11:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.893786 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.893873 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.893899 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.893924 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.893943 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:36Z","lastTransitionTime":"2026-02-03T07:11:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.996465 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.996510 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.996519 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.996532 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:36 crc kubenswrapper[4708]: I0203 07:11:36.996541 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:36Z","lastTransitionTime":"2026-02-03T07:11:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.091834 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.091909 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:11:37 crc kubenswrapper[4708]: E0203 07:11:37.091992 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:11:37 crc kubenswrapper[4708]: E0203 07:11:37.092096 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.099692 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.099740 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.099753 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.099771 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.099784 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:37Z","lastTransitionTime":"2026-02-03T07:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.110944 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 07:20:11.232208814 +0000 UTC Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.202053 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.202098 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.202127 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.202145 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.202155 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:37Z","lastTransitionTime":"2026-02-03T07:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.304834 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.304896 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.304911 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.304931 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.304944 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:37Z","lastTransitionTime":"2026-02-03T07:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.446502 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.446551 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.446563 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.446579 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.446590 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:37Z","lastTransitionTime":"2026-02-03T07:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.549230 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.549287 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.549307 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.549333 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.549352 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:37Z","lastTransitionTime":"2026-02-03T07:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.651755 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.651788 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.651811 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.651826 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.651835 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:37Z","lastTransitionTime":"2026-02-03T07:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.753384 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.753425 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.753436 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.753487 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.753500 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:37Z","lastTransitionTime":"2026-02-03T07:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.855545 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.855572 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.855579 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.855591 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.855600 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:37Z","lastTransitionTime":"2026-02-03T07:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.958877 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.958926 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.958936 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.958953 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:37 crc kubenswrapper[4708]: I0203 07:11:37.958971 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:37Z","lastTransitionTime":"2026-02-03T07:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.061931 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.061985 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.061997 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.062015 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.062028 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:38Z","lastTransitionTime":"2026-02-03T07:11:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.092361 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:11:38 crc kubenswrapper[4708]: E0203 07:11:38.092557 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.092629 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:11:38 crc kubenswrapper[4708]: E0203 07:11:38.092866 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.111458 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 18:22:08.98240079 +0000 UTC Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.164890 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.164961 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.164977 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.165001 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.165020 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:38Z","lastTransitionTime":"2026-02-03T07:11:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.269467 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.269537 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.269558 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.269582 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.269600 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:38Z","lastTransitionTime":"2026-02-03T07:11:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.373433 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.373511 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.373536 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.373568 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.373590 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:38Z","lastTransitionTime":"2026-02-03T07:11:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.475989 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.476047 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.476058 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.476078 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.476090 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:38Z","lastTransitionTime":"2026-02-03T07:11:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.578834 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.578925 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.578950 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.578974 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.578989 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:38Z","lastTransitionTime":"2026-02-03T07:11:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.681164 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.681192 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.681203 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.681214 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.681222 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:38Z","lastTransitionTime":"2026-02-03T07:11:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.783779 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.783888 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.783905 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.783927 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.783943 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:38Z","lastTransitionTime":"2026-02-03T07:11:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.886695 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.886755 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.886767 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.886783 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.886816 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:38Z","lastTransitionTime":"2026-02-03T07:11:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.989598 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.989725 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.989761 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.989830 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:38 crc kubenswrapper[4708]: I0203 07:11:38.989854 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:38Z","lastTransitionTime":"2026-02-03T07:11:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.091966 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.092069 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:11:39 crc kubenswrapper[4708]: E0203 07:11:39.092155 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:11:39 crc kubenswrapper[4708]: E0203 07:11:39.092316 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.093362 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.093413 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.093429 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.093452 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.093470 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:39Z","lastTransitionTime":"2026-02-03T07:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.111743 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 23:36:16.27017973 +0000 UTC Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.197759 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.197847 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.197873 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.197900 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.197919 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:39Z","lastTransitionTime":"2026-02-03T07:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.305053 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.305758 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.305852 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.305879 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.306527 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:39Z","lastTransitionTime":"2026-02-03T07:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.409989 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.410068 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.410085 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.410111 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.410128 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:39Z","lastTransitionTime":"2026-02-03T07:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.512956 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.513033 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.513058 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.513087 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.513108 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:39Z","lastTransitionTime":"2026-02-03T07:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.616484 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.616578 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.616596 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.616618 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.616637 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:39Z","lastTransitionTime":"2026-02-03T07:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.718903 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.718966 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.718983 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.719009 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.719028 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:39Z","lastTransitionTime":"2026-02-03T07:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.821857 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.821935 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.821954 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.821982 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.822005 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:39Z","lastTransitionTime":"2026-02-03T07:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.923958 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.924042 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.924065 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.924151 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:39 crc kubenswrapper[4708]: I0203 07:11:39.924178 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:39Z","lastTransitionTime":"2026-02-03T07:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.026581 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.026668 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.026707 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.026738 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.026762 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:40Z","lastTransitionTime":"2026-02-03T07:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.092691 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.092882 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:11:40 crc kubenswrapper[4708]: E0203 07:11:40.093039 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:11:40 crc kubenswrapper[4708]: E0203 07:11:40.093189 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.112692 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 13:46:00.786267179 +0000 UTC Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.129280 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.129326 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.129337 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.129353 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.129365 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:40Z","lastTransitionTime":"2026-02-03T07:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.233286 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.233494 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.233525 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.233555 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.233595 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:40Z","lastTransitionTime":"2026-02-03T07:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.337370 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.337494 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.337532 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.337559 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.337575 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:40Z","lastTransitionTime":"2026-02-03T07:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.439943 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.440010 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.440031 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.440059 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.440080 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:40Z","lastTransitionTime":"2026-02-03T07:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.547430 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.547492 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.547505 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.547523 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.547534 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:40Z","lastTransitionTime":"2026-02-03T07:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.650027 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.650106 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.650129 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.650160 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.650182 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:40Z","lastTransitionTime":"2026-02-03T07:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.753482 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.753550 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.753573 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.753604 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.753625 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:40Z","lastTransitionTime":"2026-02-03T07:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.816621 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.816703 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.816727 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.816758 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.816777 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:40Z","lastTransitionTime":"2026-02-03T07:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:40 crc kubenswrapper[4708]: E0203 07:11:40.838236 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:40Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.843661 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.843715 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.843732 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.843754 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.843771 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:40Z","lastTransitionTime":"2026-02-03T07:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:40 crc kubenswrapper[4708]: E0203 07:11:40.865992 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:40Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.871758 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.871914 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.871935 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.871958 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.871975 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:40Z","lastTransitionTime":"2026-02-03T07:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:40 crc kubenswrapper[4708]: E0203 07:11:40.892433 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:40Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.897360 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.897416 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.897434 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.897460 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.897478 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:40Z","lastTransitionTime":"2026-02-03T07:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:40 crc kubenswrapper[4708]: E0203 07:11:40.920786 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:40Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.925927 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.925983 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.926001 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.926022 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.926040 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:40Z","lastTransitionTime":"2026-02-03T07:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:40 crc kubenswrapper[4708]: E0203 07:11:40.946728 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:40Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:40 crc kubenswrapper[4708]: E0203 07:11:40.946933 4708 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.948484 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.948531 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.948546 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.948567 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:40 crc kubenswrapper[4708]: I0203 07:11:40.948584 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:40Z","lastTransitionTime":"2026-02-03T07:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.051741 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.051864 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.051886 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.051911 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.051928 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:41Z","lastTransitionTime":"2026-02-03T07:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.092716 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.092886 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:11:41 crc kubenswrapper[4708]: E0203 07:11:41.092926 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:11:41 crc kubenswrapper[4708]: E0203 07:11:41.093240 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.111205 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.113354 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 12:42:17.990895616 +0000 UTC Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.154193 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.154261 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.154286 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.154313 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.154332 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:41Z","lastTransitionTime":"2026-02-03T07:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.257342 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.257440 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.257456 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.257481 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.257498 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:41Z","lastTransitionTime":"2026-02-03T07:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.360579 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.360652 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.360669 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.360694 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.360713 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:41Z","lastTransitionTime":"2026-02-03T07:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.463177 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.463225 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.463235 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.463257 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.463270 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:41Z","lastTransitionTime":"2026-02-03T07:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.566579 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.566624 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.566635 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.566651 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.566663 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:41Z","lastTransitionTime":"2026-02-03T07:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.670008 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.670047 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.670056 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.670070 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.670079 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:41Z","lastTransitionTime":"2026-02-03T07:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.771993 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.772032 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.772043 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.772060 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.772070 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:41Z","lastTransitionTime":"2026-02-03T07:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.874437 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.874502 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.874515 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.874554 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.874567 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:41Z","lastTransitionTime":"2026-02-03T07:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.977184 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.977237 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.977253 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.977275 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:41 crc kubenswrapper[4708]: I0203 07:11:41.977291 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:41Z","lastTransitionTime":"2026-02-03T07:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.080723 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.080828 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.080846 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.080870 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.080887 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:42Z","lastTransitionTime":"2026-02-03T07:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.092164 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:11:42 crc kubenswrapper[4708]: E0203 07:11:42.092318 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.092398 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 03 07:11:42 crc kubenswrapper[4708]: E0203 07:11:42.092530 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.111665 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:42Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.114864 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 11:41:19.891616646 +0000 UTC Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.126343 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"s
tartTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:42Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.141276 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"pod
IP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:42Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.152069 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:42Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.167291 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:42Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.179027 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:42Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.183304 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.183380 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.183393 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.183411 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.183452 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:42Z","lastTransitionTime":"2026-02-03T07:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.193518 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f1aa484aa9579cb96365d5a27132c7188c52d9b2d8e07a4a6367933f4305d5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:42Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.207083 4708 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c85ceeba-ec54-4325-af45-7a9176cb62a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e632f23c70ccfc0d7a4bdc9f03ca71f0c29b7a1b446b9b1ee14ede18ee1682b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://74d6f26de36eecd83615f1e4f61bb111775073dff67ca10a94a65f436569de13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4f7fx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-02-03T07:11:42Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.220658 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:42Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.234712 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5c77e02-3759-4cf1-9952-ffeffc2bf7cf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e28c7820a6c7a758468366fd037570c94e2e7fa0e1b1383572572e9f5e9c90c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a72c95710bf91e6e945e1c87e81ca7bb1c45736af741186878d43e8d44cec3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de545ea0d1c7a9dc2abbabf5608bd9c24d017b16ac640fac8d033d5b2f9e3e13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00bbcc3fd4dd42221c01f2c794aaac720d860f9214d6473ddca8ca96d5991442\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00bbcc3fd4dd42221c01f2c794aaac720d860f9214d6473ddca8ca96d5991442\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:42Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.246732 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:42Z is after 2025-08-24T17:21:41Z" Feb 03 
07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.259922 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d4f0bd78f46aff839e5e3f84aab51a1734c1968d5d9f306b6175d0c4e21770e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:11:31Z\\\",\\\"message\\\":\\\"2026-02-03T07:10:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_a9d1b301-d7e3-4c96-b77a-0ab1103474ea\\\\n2026-02-03T07:10:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_a9d1b301-d7e3-4c96-b77a-0ab1103474ea to /host/opt/cni/bin/\\\\n2026-02-03T07:10:46Z [verbose] multus-daemon started\\\\n2026-02-03T07:10:46Z [verbose] Readiness Indicator file check\\\\n2026-02-03T07:11:31Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:11:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:42Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.280206 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c478c9f7afdf37b64dbf2e28235ceeb1dd4ec262f8a09697f0433be0b2c5beb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c478c9f7afdf37b64dbf2e28235ceeb1dd4ec262f8a09697f0433be0b2c5beb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:11:15.068829 6392 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0203 07:11:15.068895 6392 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0203 07:11:15.068902 6392 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0203 07:11:15.068917 6392 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0203 07:11:15.068922 6392 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0203 07:11:15.068916 6392 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0203 07:11:15.068946 6392 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0203 07:11:15.068954 6392 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0203 07:11:15.068962 6392 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0203 07:11:15.068973 6392 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0203 07:11:15.068984 6392 handler.go:208] Removed *v1.Node event handler 7\\\\nI0203 07:11:15.068993 6392 handler.go:208] Removed *v1.Node event handler 2\\\\nI0203 07:11:15.069003 6392 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0203 07:11:15.069021 6392 factory.go:656] Stopping watch factory\\\\nI0203 07:11:15.069038 6392 ovnkube.go:599] Stopped ovnkube\\\\nI0203 07:11:1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:11:14Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2sfqf_openshift-ovn-kubernetes(b0d14461-efec-4909-82de-2cce585892a4)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:42Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.285392 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.285438 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.285453 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.285469 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.285480 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:42Z","lastTransitionTime":"2026-02-03T07:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.293658 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:42Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.306610 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://200b361a7b1d3ef0a5d05fba630cfe0727fbf9fb36199fe812935a6c43952335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:42Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.320086 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:42Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.331420 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6thl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851add34-7566-4ed5-b70a-c7935eb26e4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6thl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:42Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.350548 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e0b9c8a-9915-45ef-adfe-8a3f94b838f3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed87f447ce590518e6c11016afed10c275ec305ce2f1569f049fa9e72e314389\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81f8040a3903c93c6a8e700733e6d306d4d057145562bca18df93f84e05c130\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53af1b2767b38d2af1ddc24c344dda295c79bdf418f155137756679ba689cd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5281fb425defd1d58d168728e9d0c9c3e28473
3bf99c8657b71c3d70ad7faad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b91ee37b2b4a9d611dce015828570fb8b69da2b4cdbfb14452f58304390de9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00354562c08cbf28cc0aeeea5857f0a3c6720c61dd9b54d1e1033f590cea004c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00354562c08cbf28cc0aeeea5857f0a3c6720c61dd9b54d1e1033f590cea004c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2764c5a5c9edde2a19bde6f6b63e264f88e124ee3ad739a03fcbaa60ad5516d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2764c5a5c9edde2a19bde6f6b63e264f88e124ee3ad739a03fcbaa60ad5516d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://4f1e6bdad08093b55f4f4d6bbd90470546dd1f6579765bf2b7bc31f82fe116cf\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f1e6bdad08093b55f4f4d6bbd90470546dd1f6579765bf2b7bc31f82fe116cf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:42Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.387451 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.387521 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.387538 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.387561 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.387578 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:42Z","lastTransitionTime":"2026-02-03T07:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.490528 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.490572 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.490583 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.490600 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.490614 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:42Z","lastTransitionTime":"2026-02-03T07:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.593046 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.593084 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.593095 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.593110 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.593122 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:42Z","lastTransitionTime":"2026-02-03T07:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.695547 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.695589 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.695597 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.695612 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.695622 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:42Z","lastTransitionTime":"2026-02-03T07:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.797987 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.798039 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.798053 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.798072 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.798084 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:42Z","lastTransitionTime":"2026-02-03T07:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.900690 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.900774 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.900850 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.900882 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:42 crc kubenswrapper[4708]: I0203 07:11:42.900899 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:42Z","lastTransitionTime":"2026-02-03T07:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.004029 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.004480 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.004514 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.004546 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.004588 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:43Z","lastTransitionTime":"2026-02-03T07:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
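
Every status patch earlier in this log fails the same way: the kubelet cannot reach the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 because that webhook's serving certificate expired on 2025-08-24, long before the node's current clock of 2026-02-03. A minimal Go sketch (hypothetical, not part of the kubelet) that retrieves such a serving certificate and reports its validity window; InsecureSkipVerify is deliberate here, since verification is exactly what fails:

    package main

    import (
        "crypto/tls"
        "fmt"
        "time"
    )

    func main() {
        // Webhook endpoint taken from the log; adjust as needed.
        conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
            // Skip verification so an already-expired cert can still be inspected.
            InsecureSkipVerify: true,
        })
        if err != nil {
            panic(err)
        }
        defer conn.Close()

        cert := conn.ConnectionState().PeerCertificates[0]
        fmt.Printf("subject: %s\n", cert.Subject)
        fmt.Printf("valid:   %s to %s\n", cert.NotBefore, cert.NotAfter)
        fmt.Printf("expired: %v\n", time.Now().After(cert.NotAfter))
    }

Run against the endpoint above, this would print an expired window ending 2025-08-24T17:21:41Z, matching the x509 errors in the patch failures.
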
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.092519 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.092552 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 03 07:11:43 crc kubenswrapper[4708]: E0203 07:11:43.092709 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f"
Feb 03 07:11:43 crc kubenswrapper[4708]: E0203 07:11:43.092846 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.107553 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.107604 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.107620 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.107642 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.107661 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:43Z","lastTransitionTime":"2026-02-03T07:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.115865 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 07:45:16.115425334 +0000 UTC
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.210068 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.210097 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.210105 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.210117 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.210125 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:43Z","lastTransitionTime":"2026-02-03T07:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.312082 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.312109 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.312118 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.312130 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.312139 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:43Z","lastTransitionTime":"2026-02-03T07:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
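
The certificate_manager.go:356 line above shows why the kubelet-serving certificate is still valid while the webhook's is not: the kubelet's certificate manager schedules rotation at a jittered point inside the certificate's validity window (client-go picks a deadline roughly 70-90% of the way through the cert's lifetime, re-randomized on each check, which would explain why nearby log lines report different rotation deadlines for the same expiration). A sketch of that calculation under those assumptions; the constants are recalled from k8s.io/client-go/util/certificate and may differ between releases:

    package main

    import (
        "fmt"
        "math/rand"
        "time"
    )

    // rotationDeadline approximates how client-go's certificate manager picks
    // the next rotation time: a random point 70-90% of the way through the
    // certificate's validity window. Assumption: exact constants vary by release.
    func rotationDeadline(notBefore, notAfter time.Time) time.Time {
        total := notAfter.Sub(notBefore)
        jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
        return notBefore.Add(jittered)
    }

    func main() {
        // Expiration from the log line above; the validity start is not logged,
        // so a one-year lifetime is assumed purely for illustration.
        notAfter, _ := time.Parse("2006-01-02 15:04:05", "2026-02-24 05:53:03")
        notBefore := notAfter.Add(-365 * 24 * time.Hour)
        fmt.Println("rotation deadline:", rotationDeadline(notBefore, notAfter))
    }

Note that both deadlines logged in this section (2025-11-15 and 2026-01-16) already lie in the past relative to the node's clock of 2026-02-03, so the manager should attempt rotation on its next sync; the repeated deadline lines are consistent with that loop.
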
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.414782 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.414888 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.414912 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.414944 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.414968 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:43Z","lastTransitionTime":"2026-02-03T07:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.517179 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.517210 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.517219 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.517233 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.517244 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:43Z","lastTransitionTime":"2026-02-03T07:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.619712 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.619759 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.619771 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.619786 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.619821 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:43Z","lastTransitionTime":"2026-02-03T07:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.723338 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.723426 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.723451 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.723478 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.723495 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:43Z","lastTransitionTime":"2026-02-03T07:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.826196 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.826266 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.826290 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.826317 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.826338 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:43Z","lastTransitionTime":"2026-02-03T07:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.929468 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.929524 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.929537 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.929556 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:43 crc kubenswrapper[4708]: I0203 07:11:43.929572 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:43Z","lastTransitionTime":"2026-02-03T07:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
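
The repeating NodeNotReady cycle above all reduces to one condition: the runtime reports NetworkReady=false until a CNI configuration file appears in /etc/kubernetes/cni/net.d/, which in turn cannot happen while ovnkube-controller is crash-looping. A standalone Go sketch of the check the message implies; the directory comes from the log, while the extension list (.conf, .conflist, .json) is an assumption based on the conventions of CNI's libcni:

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        confDir := "/etc/kubernetes/cni/net.d" // from the kubelet message above
        entries, err := os.ReadDir(confDir)
        if err != nil {
            fmt.Println("cannot read CNI conf dir:", err)
            return
        }
        var found []string
        for _, e := range entries {
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json": // extensions libcni conventionally loads
                found = append(found, e.Name())
            }
        }
        if len(found) == 0 {
            fmt.Println("no CNI configuration files: the node will stay NotReady")
            return
        }
        fmt.Println("CNI configs present:", found)
    }

On this node the directory is empty until the OVN-Kubernetes pod writes its config, so the loop above keeps re-recording the same five events roughly every 100ms.
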
Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.032228 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.032275 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.032286 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.032301 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.032312 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:44Z","lastTransitionTime":"2026-02-03T07:11:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.091878 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 03 07:11:44 crc kubenswrapper[4708]: E0203 07:11:44.091991 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.092607 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 03 07:11:44 crc kubenswrapper[4708]: E0203 07:11:44.092670 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.093316 4708 scope.go:117] "RemoveContainer" containerID="5c478c9f7afdf37b64dbf2e28235ceeb1dd4ec262f8a09697f0433be0b2c5beb"
Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.116034 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 01:02:41.261950462 +0000 UTC
Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.135855 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.135902 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.135925 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.135952 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.135974 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:44Z","lastTransitionTime":"2026-02-03T07:11:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.238865 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.238903 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.238915 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.238928 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.238938 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:44Z","lastTransitionTime":"2026-02-03T07:11:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.340920 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.340969 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.340978 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.340994 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.341004 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:44Z","lastTransitionTime":"2026-02-03T07:11:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.443818 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.443855 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.443867 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.443880 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.443888 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:44Z","lastTransitionTime":"2026-02-03T07:11:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.546747 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.546825 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.546843 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.546860 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.546872 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:44Z","lastTransitionTime":"2026-02-03T07:11:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.587527 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2sfqf_b0d14461-efec-4909-82de-2cce585892a4/ovnkube-controller/2.log" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.590733 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" event={"ID":"b0d14461-efec-4909-82de-2cce585892a4","Type":"ContainerStarted","Data":"cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1"} Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.591289 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.614866 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:44Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.637518 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:44Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.648781 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.648850 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.648861 4708 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.648878 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.648889 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:44Z","lastTransitionTime":"2026-02-03T07:11:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.658182 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:44Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.681948 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f1aa484aa9579cb96365d5a27132c7188c52d9b2d8e07a4a6367933f4305d5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:44Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.699855 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c85ceeba-ec54-4325-af45-7a9176cb62a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e632f23c70ccfc0d7a4bdc9f03ca71f0c29b7a1b446b9b1ee14ede18ee1682b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://74d6f26de36eecd83615f1e4f61bb111775073dff67ca10a94a65f436569de13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\"
:\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4f7fx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:44Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.713446 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes
/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:44Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.724659 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5c77e02-3759-4cf1-9952-ffeffc2bf7cf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e28c7820a6c7a758468366fd037570c94e2e7fa0e1b1383572572e9f5e9c90c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a72c95710bf91e6e945e1c87e81ca7bb1c45736af741186878d43e8d44cec3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de545ea0d1c7a9dc2abbabf5608bd9c24d017b16ac640fac8d033d5b2f9e3e13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00bbcc3fd4dd42221c01f2c794aaac720d860f9214d6473ddca8ca96d5991442\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00bbcc3fd4dd42221c01f2c794aaac720d860f9214d6473ddca8ca96d5991442\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:44Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.735149 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:44Z is after 2025-08-24T17:21:41Z" Feb 03 
07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.745905 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d4f0bd78f46aff839e5e3f84aab51a1734c1968d5d9f306b6175d0c4e21770e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:11:31Z\\\",\\\"message\\\":\\\"2026-02-03T07:10:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_a9d1b301-d7e3-4c96-b77a-0ab1103474ea\\\\n2026-02-03T07:10:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_a9d1b301-d7e3-4c96-b77a-0ab1103474ea to /host/opt/cni/bin/\\\\n2026-02-03T07:10:46Z [verbose] multus-daemon started\\\\n2026-02-03T07:10:46Z [verbose] Readiness Indicator file check\\\\n2026-02-03T07:11:31Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:11:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:44Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.750479 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.750517 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.750529 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.750544 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.750555 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:44Z","lastTransitionTime":"2026-02-03T07:11:44Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.768842 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c478c9f7afdf37b64dbf2e28235ceeb1dd4ec262f8a09697f0433be0b2c5beb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:11:15.068829 6392 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0203 07:11:15.068895 6392 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0203 07:11:15.068902 6392 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0203 07:11:15.068917 6392 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0203 07:11:15.068922 6392 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0203 07:11:15.068916 6392 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0203 07:11:15.068946 6392 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0203 07:11:15.068954 6392 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0203 07:11:15.068962 6392 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0203 07:11:15.068973 6392 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0203 07:11:15.068984 6392 handler.go:208] Removed *v1.Node event handler 7\\\\nI0203 07:11:15.068993 6392 handler.go:208] Removed *v1.Node event handler 2\\\\nI0203 07:11:15.069003 6392 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0203 07:11:15.069021 6392 factory.go:656] Stopping watch factory\\\\nI0203 07:11:15.069038 6392 ovnkube.go:599] Stopped ovnkube\\\\nI0203 
07:11:1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:11:14Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\
\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:44Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.789528 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e0b9c8a-9915-45ef-adfe-8a3f94b838f3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed87f447ce590518e6c11016afed10c275ec305ce2f1569f049fa9e72e314389\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81f8040a3903c93c6a8e700733e6d306d4d057145562bca18df93f84e05c130\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53af1b2767b38d2af1ddc24c344dda295c79bdf418f155137756679ba689cd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5281fb425defd1d58d168728e9d0c9c3e28473
3bf99c8657b71c3d70ad7faad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b91ee37b2b4a9d611dce015828570fb8b69da2b4cdbfb14452f58304390de9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00354562c08cbf28cc0aeeea5857f0a3c6720c61dd9b54d1e1033f590cea004c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00354562c08cbf28cc0aeeea5857f0a3c6720c61dd9b54d1e1033f590cea004c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2764c5a5c9edde2a19bde6f6b63e264f88e124ee3ad739a03fcbaa60ad5516d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2764c5a5c9edde2a19bde6f6b63e264f88e124ee3ad739a03fcbaa60ad5516d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://4f1e6bdad08093b55f4f4d6bbd90470546dd1f6579765bf2b7bc31f82fe116cf\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f1e6bdad08093b55f4f4d6bbd90470546dd1f6579765bf2b7bc31f82fe116cf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:44Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.805831 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b
31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://200b361a7b1d3ef0a5d05fba630cfe0727fbf9fb36199fe812935a6c43952335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 
envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-02-03T07:11:44Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.828592 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:44Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.843080 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6thl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851add34-7566-4ed5-b70a-c7935eb26e4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6thl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:44Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.853845 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.853930 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.853954 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.853983 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.854002 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:44Z","lastTransitionTime":"2026-02-03T07:11:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.858942 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:44Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.870119 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:44Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.882726 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:44Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.894652 4708 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:44Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.956685 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.956721 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.956730 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.956743 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:44 crc kubenswrapper[4708]: I0203 07:11:44.956754 4708 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:44Z","lastTransitionTime":"2026-02-03T07:11:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.059383 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.059426 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.059434 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.059447 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.059456 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:45Z","lastTransitionTime":"2026-02-03T07:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.092140 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.092190 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:11:45 crc kubenswrapper[4708]: E0203 07:11:45.092290 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:11:45 crc kubenswrapper[4708]: E0203 07:11:45.092429 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.116404 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 09:35:53.928880221 +0000 UTC Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.162478 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.162556 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.162580 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.162627 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.162652 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:45Z","lastTransitionTime":"2026-02-03T07:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.265620 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.265691 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.265714 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.265741 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.265764 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:45Z","lastTransitionTime":"2026-02-03T07:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.368743 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.368821 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.368839 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.368862 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.368879 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:45Z","lastTransitionTime":"2026-02-03T07:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.472471 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.472512 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.472520 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.472534 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.472545 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:45Z","lastTransitionTime":"2026-02-03T07:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.575543 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.575591 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.575603 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.575619 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.575629 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:45Z","lastTransitionTime":"2026-02-03T07:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.597048 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2sfqf_b0d14461-efec-4909-82de-2cce585892a4/ovnkube-controller/3.log" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.598339 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2sfqf_b0d14461-efec-4909-82de-2cce585892a4/ovnkube-controller/2.log" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.602918 4708 generic.go:334] "Generic (PLEG): container finished" podID="b0d14461-efec-4909-82de-2cce585892a4" containerID="cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1" exitCode=1 Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.602970 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" event={"ID":"b0d14461-efec-4909-82de-2cce585892a4","Type":"ContainerDied","Data":"cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1"} Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.603015 4708 scope.go:117] "RemoveContainer" containerID="5c478c9f7afdf37b64dbf2e28235ceeb1dd4ec262f8a09697f0433be0b2c5beb" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.604189 4708 scope.go:117] "RemoveContainer" containerID="cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1" Feb 03 07:11:45 crc kubenswrapper[4708]: E0203 07:11:45.604417 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2sfqf_openshift-ovn-kubernetes(b0d14461-efec-4909-82de-2cce585892a4)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" podUID="b0d14461-efec-4909-82de-2cce585892a4" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.622300 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:45Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.635412 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5c77e02-3759-4cf1-9952-ffeffc2bf7cf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e28c7820a6c7a758468366fd037570c94e2e7fa0e1b1383572572e9f5e9c90c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a72c95710bf91e6e945e1c87e81ca7bb1c45736af741186878d43e8d44cec3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de545ea0d1c7a9dc2abbabf5608bd9c24d017b16ac640fac8d033d5b2f9e3e13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00bbcc3fd4dd42221c01f2c794aaac720d860f9214d6473ddca8ca96d5991442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00bbcc3fd4dd42221c01f2c794aaac720d860f9214d6473ddca8ca96d5991442\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:45Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.648683 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:45Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.661505 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d4f0bd78f46aff839e5e3f84aab51a1734c1968d5d9f306b6175d0c4e21770e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:11:31Z\\\",\\\"message\\\":\\\"2026-02-03T07:10:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_a9d1b301-d7e3-4c96-b77a-0ab1103474ea\\\\n2026-02-03T07:10:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_a9d1b301-d7e3-4c96-b77a-0ab1103474ea to /host/opt/cni/bin/\\\\n2026-02-03T07:10:46Z [verbose] multus-daemon started\\\\n2026-02-03T07:10:46Z [verbose] Readiness Indicator file check\\\\n2026-02-03T07:11:31Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:11:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:45Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.678605 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.678653 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.678664 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.678679 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.678691 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:45Z","lastTransitionTime":"2026-02-03T07:11:45Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.684598 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c478c9f7afdf37b64dbf2e28235ceeb1dd4ec262f8a09697f0433be0b2c5beb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0203 07:11:15.068829 6392 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0203 07:11:15.068895 6392 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0203 07:11:15.068902 6392 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0203 07:11:15.068917 6392 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0203 07:11:15.068922 6392 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0203 07:11:15.068916 6392 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0203 07:11:15.068946 6392 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0203 07:11:15.068954 6392 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0203 07:11:15.068962 6392 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0203 07:11:15.068973 6392 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0203 07:11:15.068984 6392 handler.go:208] Removed *v1.Node event handler 7\\\\nI0203 07:11:15.068993 6392 handler.go:208] Removed *v1.Node event handler 2\\\\nI0203 07:11:15.069003 6392 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0203 07:11:15.069021 6392 factory.go:656] Stopping watch factory\\\\nI0203 07:11:15.069038 6392 ovnkube.go:599] Stopped ovnkube\\\\nI0203 
07:11:1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:11:14Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:11:45Z\\\",\\\"message\\\":\\\"-749d76644c-4f7fx\\\\nI0203 07:11:45.163816 6827 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI0203 07:11:45.165974 6827 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI0203 07:11:45.165989 6827 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI0203 07:11:45.166000 6827 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI0203 07:11:45.166009 6827 default_network_controller.go:776] Recording success event on pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI0203 07:11:45.163835 6827 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nI0203 07:11:45.166028 6827 obj_retry.go:386] Retry successful for *v1.Pod openshift-etcd/etcd-crc after 0 failed attempt(s)\\\\nI0203 07:11:45.166038 6827 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nF0203 07:11:45.163850 6827 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:45Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.706381 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e0b9c8a-9915-45ef-adfe-8a3f94b838f3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed87f447ce590518e6c11016afed10c275ec305ce2f1569f049fa9e72e314389\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81f8040a3903c93c6a8e700733e6d306d4d057145562bca18df93f84e05c130\\\",\\\"
image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53af1b2767b38d2af1ddc24c344dda295c79bdf418f155137756679ba689cd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5281fb425defd1d58d168728e9d0c9c3e284733bf99c8657b71c3d70ad7faad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b91ee37b2b4a9d611dce015828570fb8b69da2b4cdbfb14452f58304390de9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00354562c08cbf28cc0aeeea5857f0a3c6720c61dd9b54d1e1033f590cea004c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sh
a256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00354562c08cbf28cc0aeeea5857f0a3c6720c61dd9b54d1e1033f590cea004c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2764c5a5c9edde2a19bde6f6b63e264f88e124ee3ad739a03fcbaa60ad5516d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2764c5a5c9edde2a19bde6f6b63e264f88e124ee3ad739a03fcbaa60ad5516d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://4f1e6bdad08093b55f4f4d6bbd90470546dd1f6579765bf2b7bc31f82fe116cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f1e6bdad08093b55f4f4d6bbd90470546dd1f6579765bf2b7bc31f82fe116cf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:45Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.721643 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://200b361a7b1d3ef0a5d05fba630cfe0727fbf9fb36199fe812935a6c43952335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:45Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.733457 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:45Z is after 2025-08-24T17:21:41Z"
Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.744897 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6thl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851add34-7566-4ed5-b70a-c7935eb26e4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6thl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:45Z is after 2025-08-24T17:21:41Z"
Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.757184 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:45Z is after 2025-08-24T17:21:41Z"
Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.768244 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:45Z is after 2025-08-24T17:21:41Z"
Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.780530 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:45Z is after 2025-08-24T17:21:41Z"
Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.782305 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.782378 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.782395 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.782419 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.782437 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:45Z","lastTransitionTime":"2026-02-03T07:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.794258 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:45Z is after 2025-08-24T17:21:41Z"
Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.809219 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:45Z is after 2025-08-24T17:21:41Z"
Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.823618 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:45Z is after 2025-08-24T17:21:41Z"
Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.837061 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:45Z is after 2025-08-24T17:21:41Z"
Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.854555 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f1aa484aa9579cb96365d5a27132c7188c52d9b2d8e07a4a6367933f4305d5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:45Z is after 2025-08-24T17:21:41Z"
Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.865122 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c85ceeba-ec54-4325-af45-7a9176cb62a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e632f23c70ccfc0d7a4bdc9f03ca71f0c29b7a1b446b9b1ee14ede18ee1682b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://74d6f26de36eecd83615f1e4f61bb111775073dff67ca10a94a65f436569de13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4f7fx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:45Z is after 2025-08-24T17:21:41Z"
Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.885463 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.885520 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.885535 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.885558 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.885579 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:45Z","lastTransitionTime":"2026-02-03T07:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.987777 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.987879 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.987925 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.987950 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:45 crc kubenswrapper[4708]: I0203 07:11:45.987967 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:45Z","lastTransitionTime":"2026-02-03T07:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.092019 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.092177 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.092228 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.092236 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.092027 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.092250 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.092336 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:46Z","lastTransitionTime":"2026-02-03T07:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:46 crc kubenswrapper[4708]: E0203 07:11:46.092368 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 03 07:11:46 crc kubenswrapper[4708]: E0203 07:11:46.092581 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.117180 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 22:00:43.381965453 +0000 UTC
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.195415 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.195496 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.195515 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.195538 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.195556 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:46Z","lastTransitionTime":"2026-02-03T07:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.298842 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.298885 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.298899 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.298918 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.298930 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:46Z","lastTransitionTime":"2026-02-03T07:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.404678 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.404729 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.404742 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.404759 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.404776 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:46Z","lastTransitionTime":"2026-02-03T07:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.508072 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.508140 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.508162 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.508359 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.508378 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:46Z","lastTransitionTime":"2026-02-03T07:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.610784 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2sfqf_b0d14461-efec-4909-82de-2cce585892a4/ovnkube-controller/3.log"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.611166 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.611218 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.611238 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.611263 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.611284 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:46Z","lastTransitionTime":"2026-02-03T07:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.616882 4708 scope.go:117] "RemoveContainer" containerID="cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1"
Feb 03 07:11:46 crc kubenswrapper[4708]: E0203 07:11:46.617214 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2sfqf_openshift-ovn-kubernetes(b0d14461-efec-4909-82de-2cce585892a4)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" podUID="b0d14461-efec-4909-82de-2cce585892a4"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.631679 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:46Z is after 2025-08-24T17:21:41Z"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.649196 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:46Z is after 2025-08-24T17:21:41Z"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.666935 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:46Z is after 2025-08-24T17:21:41Z"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.684960 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:46Z is after 2025-08-24T17:21:41Z"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.699675 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:46Z is after 2025-08-24T17:21:41Z"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.713959 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.714015 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.714030 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.714049 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.714085 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:46Z","lastTransitionTime":"2026-02-03T07:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.717564 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:46Z is after 2025-08-24T17:21:41Z"
Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.731064 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.747397 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f1aa484aa9579cb96365d5a27132c7188c52d9b2d8e07a4a6367933f4305d5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.763920 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c85ceeba-ec54-4325-af45-7a9176cb62a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e632f23c70ccfc0d7a4bdc9f03ca71f0c29b7a1b446b9b1ee14ede18ee1682b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://74d6f26de36eecd83615f1e4f61bb111775073dff67ca10a94a65f436569de13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4f7fx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:46Z is after 2025-08-24T17:21:41Z" Feb 03 
07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.781182 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.795884 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5c77e02-3759-4cf1-9952-ffeffc2bf7cf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e28c7820a6c7a758468366fd037570c94e2e7fa0e1b1383572572e9f5e9c90c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a72c95710bf91e6e945e1c87e81ca7bb1c45736af741186878d43e8d44cec3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de545ea0d1c7a9dc2abbabf5608bd9c24d017b16ac640fac8d033d5b2f9e3e13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00bbcc3fd4dd42221c01f2c794aaac720d860f9214d6473ddca8ca96d5991442\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00bbcc3fd4dd42221c01f2c794aaac720d860f9214d6473ddca8ca96d5991442\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.810294 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:46Z is after 2025-08-24T17:21:41Z" Feb 03 
07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.816699 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.816765 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.816780 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.816813 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.816823 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:46Z","lastTransitionTime":"2026-02-03T07:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.827239 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d4f0bd78f46aff839e5e3f84aab51a1734c1968d5d9f306b6175d0c4e21770e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:11:31Z\\\",\\\"message\\\":\\\"2026-02-03T07:10:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_a9d1b301-d7e3-4c96-b77a-0ab1103474ea\\\\n2026-02-03T07:10:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_a9d1b301-d7e3-4c96-b77a-0ab1103474ea to /host/opt/cni/bin/\\\\n2026-02-03T07:10:46Z [verbose] multus-daemon started\\\\n2026-02-03T07:10:46Z [verbose] Readiness Indicator file check\\\\n2026-02-03T07:11:31Z [error] have you checked that your default network is ready? 
still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:11:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.856086 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:11:45Z\\\",\\\"message\\\":\\\"-749d76644c-4f7fx\\\\nI0203 07:11:45.163816 6827 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI0203 07:11:45.165974 6827 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI0203 07:11:45.165989 6827 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI0203 07:11:45.166000 6827 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI0203 07:11:45.166009 6827 default_network_controller.go:776] Recording success event on pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI0203 07:11:45.163835 6827 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nI0203 07:11:45.166028 6827 obj_retry.go:386] Retry successful for *v1.Pod openshift-etcd/etcd-crc after 0 failed attempt(s)\\\\nI0203 07:11:45.166038 6827 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nF0203 07:11:45.163850 6827 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:11:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2sfqf_openshift-ovn-kubernetes(b0d14461-efec-4909-82de-2cce585892a4)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.875203 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e0b9c8a-9915-45ef-adfe-8a3f94b838f3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed87f447ce590518e6c11016afed10c275ec305ce2f1569f049fa9e72e314389\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81f8040a3903c93c6a8e700
733e6d306d4d057145562bca18df93f84e05c130\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53af1b2767b38d2af1ddc24c344dda295c79bdf418f155137756679ba689cd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5281fb425defd1d58d168728e9d0c9c3e284733bf99c8657b71c3d70ad7faad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b91ee37b2b4a9d611dce015828570fb8b69da2b4cdbfb14452f58304390de9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00354562c08cbf28cc0aeeea5857f0a3c6720c61dd9b54d1e1033f590cea004c\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00354562c08cbf28cc0aeeea5857f0a3c6720c61dd9b54d1e1033f590cea004c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2764c5a5c9edde2a19bde6f6b63e264f88e124ee3ad739a03fcbaa60ad5516d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2764c5a5c9edde2a19bde6f6b63e264f88e124ee3ad739a03fcbaa60ad5516d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://4f1e6bdad08093b55f4f4d6bbd90470546dd1f6579765bf2b7bc31f82fe116cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f1e6bdad08093b55f4f4d6bbd90470546dd1f6579765bf2b7bc31f82fe116cf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.889486 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://200b361a7b1d3ef0a5d05fba630cfe0727fbf9fb36199fe812935a6c43952335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.902781 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.913507 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6thl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851add34-7566-4ed5-b70a-c7935eb26e4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6thl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:46Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.919017 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.919066 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.919081 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.919103 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:46 crc kubenswrapper[4708]: I0203 07:11:46.919120 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:46Z","lastTransitionTime":"2026-02-03T07:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.022183 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.022243 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.022274 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.022312 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.022338 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:47Z","lastTransitionTime":"2026-02-03T07:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.092042 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.092053 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:11:47 crc kubenswrapper[4708]: E0203 07:11:47.092212 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:11:47 crc kubenswrapper[4708]: E0203 07:11:47.092564 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.118313 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 10:01:00.2643383 +0000 UTC Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.125853 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.125916 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.125937 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.125964 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.125982 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:47Z","lastTransitionTime":"2026-02-03T07:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.229718 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.229776 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.229839 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.229874 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.229894 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:47Z","lastTransitionTime":"2026-02-03T07:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.332352 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.332402 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.332413 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.332430 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.332442 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:47Z","lastTransitionTime":"2026-02-03T07:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.435211 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.435561 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.435705 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.436021 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.436077 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:47Z","lastTransitionTime":"2026-02-03T07:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.539166 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.539205 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.539215 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.539230 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.539243 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:47Z","lastTransitionTime":"2026-02-03T07:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.641923 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.641995 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.642011 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.642039 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.642062 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:47Z","lastTransitionTime":"2026-02-03T07:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.745430 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.745471 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.745481 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.745495 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.745506 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:47Z","lastTransitionTime":"2026-02-03T07:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.756074 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.756189 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.756224 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:11:47 crc kubenswrapper[4708]: E0203 07:11:47.756331 4708 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 03 07:11:47 crc kubenswrapper[4708]: E0203 07:11:47.756368 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:51.756340942 +0000 UTC m=+150.738287749 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:11:47 crc kubenswrapper[4708]: E0203 07:11:47.756410 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-03 07:12:51.756399193 +0000 UTC m=+150.738346000 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 03 07:11:47 crc kubenswrapper[4708]: E0203 07:11:47.756461 4708 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 03 07:11:47 crc kubenswrapper[4708]: E0203 07:11:47.756548 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-03 07:12:51.756525977 +0000 UTC m=+150.738472774 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.847953 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.848014 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.848032 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.848058 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.848075 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:47Z","lastTransitionTime":"2026-02-03T07:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.856595 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.856663 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:11:47 crc kubenswrapper[4708]: E0203 07:11:47.856788 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 03 07:11:47 crc kubenswrapper[4708]: E0203 07:11:47.856823 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 03 07:11:47 crc kubenswrapper[4708]: E0203 07:11:47.856834 4708 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 07:11:47 crc kubenswrapper[4708]: E0203 07:11:47.856923 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-03 07:12:51.856909408 +0000 UTC m=+150.838856215 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 07:11:47 crc kubenswrapper[4708]: E0203 07:11:47.857034 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 03 07:11:47 crc kubenswrapper[4708]: E0203 07:11:47.857082 4708 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 03 07:11:47 crc kubenswrapper[4708]: E0203 07:11:47.857104 4708 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 07:11:47 crc kubenswrapper[4708]: E0203 07:11:47.857186 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-03 07:12:51.857162984 +0000 UTC m=+150.839109831 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.951096 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.951143 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.951151 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.951188 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:47 crc kubenswrapper[4708]: I0203 07:11:47.951198 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:47Z","lastTransitionTime":"2026-02-03T07:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.060376 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.060438 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.060448 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.060470 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.060485 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:48Z","lastTransitionTime":"2026-02-03T07:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.092450 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:11:48 crc kubenswrapper[4708]: E0203 07:11:48.092633 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.093083 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:11:48 crc kubenswrapper[4708]: E0203 07:11:48.093239 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.109157 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.119443 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 17:12:11.392219183 +0000 UTC Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.163608 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.163656 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.163667 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.163684 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.163696 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:48Z","lastTransitionTime":"2026-02-03T07:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.266620 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.266677 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.266694 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.266720 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.266738 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:48Z","lastTransitionTime":"2026-02-03T07:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.369436 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.369487 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.369504 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.369528 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.369546 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:48Z","lastTransitionTime":"2026-02-03T07:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.472376 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.472417 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.472427 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.472445 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.472456 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:48Z","lastTransitionTime":"2026-02-03T07:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.575969 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.576066 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.576086 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.576115 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.576135 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:48Z","lastTransitionTime":"2026-02-03T07:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.678861 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.678899 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.678911 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.678928 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.678940 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:48Z","lastTransitionTime":"2026-02-03T07:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.781011 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.781072 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.781089 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.781111 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.781130 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:48Z","lastTransitionTime":"2026-02-03T07:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.883287 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.883342 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.883353 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.883370 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.883383 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:48Z","lastTransitionTime":"2026-02-03T07:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.986514 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.986564 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.986577 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.986593 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:48 crc kubenswrapper[4708]: I0203 07:11:48.986605 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:48Z","lastTransitionTime":"2026-02-03T07:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.089310 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.089384 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.089408 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.089449 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.089471 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:49Z","lastTransitionTime":"2026-02-03T07:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.092700 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.092923 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:11:49 crc kubenswrapper[4708]: E0203 07:11:49.093032 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:11:49 crc kubenswrapper[4708]: E0203 07:11:49.093128 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.120349 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 02:03:17.16796749 +0000 UTC Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.192717 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.192760 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.192770 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.192786 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.192822 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:49Z","lastTransitionTime":"2026-02-03T07:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.295612 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.295656 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.295670 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.295689 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.295704 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:49Z","lastTransitionTime":"2026-02-03T07:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.397414 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.397448 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.397459 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.397474 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.397485 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:49Z","lastTransitionTime":"2026-02-03T07:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.499682 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.499716 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.499726 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.499742 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.499761 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:49Z","lastTransitionTime":"2026-02-03T07:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.602507 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.602562 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.602577 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.602597 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.602613 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:49Z","lastTransitionTime":"2026-02-03T07:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.705329 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.705384 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.705404 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.705428 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.705447 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:49Z","lastTransitionTime":"2026-02-03T07:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.808433 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.808490 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.808507 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.808529 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.808546 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:49Z","lastTransitionTime":"2026-02-03T07:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.911207 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.911265 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.911283 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.911309 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:49 crc kubenswrapper[4708]: I0203 07:11:49.911327 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:49Z","lastTransitionTime":"2026-02-03T07:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.013722 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.013837 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.013859 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.013885 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.013903 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:50Z","lastTransitionTime":"2026-02-03T07:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.092897 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.092909 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:11:50 crc kubenswrapper[4708]: E0203 07:11:50.093086 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:11:50 crc kubenswrapper[4708]: E0203 07:11:50.093192 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.116902 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.117012 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.117035 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.117062 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.117083 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:50Z","lastTransitionTime":"2026-02-03T07:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.121208 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 23:01:54.782851946 +0000 UTC Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.220640 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.221047 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.221208 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.221384 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.221541 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:50Z","lastTransitionTime":"2026-02-03T07:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.323971 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.324017 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.324032 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.324047 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.324058 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:50Z","lastTransitionTime":"2026-02-03T07:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.428043 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.428434 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.428629 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.428834 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.428997 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:50Z","lastTransitionTime":"2026-02-03T07:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.532257 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.532310 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.532326 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.532348 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.532364 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:50Z","lastTransitionTime":"2026-02-03T07:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.638396 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.638480 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.638505 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.638535 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.638566 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:50Z","lastTransitionTime":"2026-02-03T07:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.742415 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.742442 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.742454 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.742470 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.742481 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:50Z","lastTransitionTime":"2026-02-03T07:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.845623 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.845699 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.845722 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.845751 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.845773 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:50Z","lastTransitionTime":"2026-02-03T07:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.948646 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.948726 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.948760 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.948835 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:50 crc kubenswrapper[4708]: I0203 07:11:50.948861 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:50Z","lastTransitionTime":"2026-02-03T07:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.052199 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.052261 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.052279 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.052303 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.052321 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:51Z","lastTransitionTime":"2026-02-03T07:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.091999 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.092135 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:11:51 crc kubenswrapper[4708]: E0203 07:11:51.092330 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:11:51 crc kubenswrapper[4708]: E0203 07:11:51.092466 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.121406 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 11:08:52.708535175 +0000 UTC Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.155034 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.155095 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.155113 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.155137 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.155154 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:51Z","lastTransitionTime":"2026-02-03T07:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.257852 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.258115 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.258139 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.258167 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.258193 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:51Z","lastTransitionTime":"2026-02-03T07:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.299010 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.299060 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.299073 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.299087 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.299098 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:51Z","lastTransitionTime":"2026-02-03T07:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:51 crc kubenswrapper[4708]: E0203 07:11:51.318809 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:51Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.323996 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.324039 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
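Note: the status patch itself is well-formed; it is rejected because every write to the Node object passes through the OVN node-identity validating webhook, and the webhook endpoint at https://127.0.0.1:9743 is serving a certificate that expired on 2025-08-24 while the node clock reads 2026-02-03. Until that certificate is rotated, no node-status update can land. Assuming shell access on the node, a standard openssl invocation can confirm what the endpoint is actually serving (illustrative, not taken from this log):

    openssl s_client -connect 127.0.0.1:9743 </dev/null 2>/dev/null | openssl x509 -noout -dates

The notAfter value printed should match the 2025-08-24T17:21:41Z expiry quoted in the webhook error above.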
event="NodeHasNoDiskPressure" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.324054 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.324075 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.324089 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:51Z","lastTransitionTime":"2026-02-03T07:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:51 crc kubenswrapper[4708]: E0203 07:11:51.342082 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:51Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.346774 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.346851 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
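Note: the identical patch error repeating at 07:11:51.318809, .342082, and .364589 is the kubelet's per-sync retry loop: it makes a small fixed number of immediate attempts to update node status before giving up until the next tick (nodeStatusUpdateRetry is 5 in the upstream kubelet). A minimal Go sketch of that pattern, with a stub in place of the real PATCH call:

    package main

    import (
        "errors"
        "fmt"
    )

    // patchNodeStatus stands in for the kubelet's PATCH of the Node object;
    // here it always fails, the way the expired-certificate webhook rejection
    // above does. (Illustrative stub, not kubelet code.)
    func patchNodeStatus() error {
        return errors.New("webhook: x509 certificate has expired")
    }

    func main() {
        const nodeStatusUpdateRetry = 5 // upstream kubelet constant
        for i := 0; i < nodeStatusUpdateRetry; i++ {
            if err := patchNodeStatus(); err != nil {
                fmt.Println("Error updating node status, will retry:", err)
                continue
            }
            return
        }
        fmt.Println("unable to update node status after", nodeStatusUpdateRetry, "attempts")
    }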
event="NodeHasNoDiskPressure" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.346867 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.346912 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.346929 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:51Z","lastTransitionTime":"2026-02-03T07:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:51 crc kubenswrapper[4708]: E0203 07:11:51.364589 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:51Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.369482 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.369555 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.369572 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.369597 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.369614 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:51Z","lastTransitionTime":"2026-02-03T07:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:51 crc kubenswrapper[4708]: E0203 07:11:51.385514 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:51Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.389685 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.389752 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.389765 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.389782 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.389843 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:51Z","lastTransitionTime":"2026-02-03T07:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:51 crc kubenswrapper[4708]: E0203 07:11:51.404736 4708 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"05e8cdbc-cdcf-43c6-8fa3-7e8e85ffcd4d\\\",\\\"systemUUID\\\":\\\"d4343d62-2ace-40c7-95b1-99d083ef1c91\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:51Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:51 crc kubenswrapper[4708]: E0203 07:11:51.404949 4708 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.406901 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.406945 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.406957 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.406974 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.406988 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:51Z","lastTransitionTime":"2026-02-03T07:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.510544 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.510610 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.510641 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.510694 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.510712 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:51Z","lastTransitionTime":"2026-02-03T07:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.614171 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.614244 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.614262 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.614285 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.614302 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:51Z","lastTransitionTime":"2026-02-03T07:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.717647 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.717724 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.717747 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.717775 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.717861 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:51Z","lastTransitionTime":"2026-02-03T07:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.820955 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.821032 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.821065 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.821095 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.821113 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:51Z","lastTransitionTime":"2026-02-03T07:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.924092 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.924193 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.924211 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.924240 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:51 crc kubenswrapper[4708]: I0203 07:11:51.924281 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:51Z","lastTransitionTime":"2026-02-03T07:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.027225 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.027308 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.027350 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.027388 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.027412 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:52Z","lastTransitionTime":"2026-02-03T07:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.092113 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.092245 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:11:52 crc kubenswrapper[4708]: E0203 07:11:52.092318 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:11:52 crc kubenswrapper[4708]: E0203 07:11:52.092441 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.118351 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7d05e11176218b9dd24c5d37c0c09c2f2b10789b38b34a251c20748bd6f86f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eea783ae80f7dea1df375831fcae70d510323cd6432a456169f33a566b64d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.122464 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate 
expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 00:26:56.659425689 +0000 UTC Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.131903 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.131958 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.131976 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.132001 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.132018 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:52Z","lastTransitionTime":"2026-02-03T07:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.136868 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.160207 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b5667f2-69df-408c-81af-c50c160ad409\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f1aa484aa9579cb96365d5a27132c7188c52d9b2d8e07a4a6367933f4305d5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed8baef928ab1a5c99a3b74e954ffd1824546ac57368d638611a87eb493dd3b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ed55006fa1340642fb25700354c0e1816af3bdc0aa1c5eddfed59348fea0ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12f3c4ca75332252283a893705fea2148b4c13098daed8caf92440587b2200c2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb5af146df8f0ac9b22a1e49a42eefa499f1454d1be50b4a7dbeba13059adcb6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b9f68ad6a663bf8f82ccd13ff703944d0aa9d2df66acd3f5ee01d19c1706f5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7859c62f62ebb2f4e707dcafbd219277172336e42aa0142f877f87eef09cfa7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9kx9t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fj5fc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.179214 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c85ceeba-ec54-4325-af45-7a9176cb62a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e632f23c70ccfc0d7a4bdc9f03ca71f0c29b7a1b446b9b1ee14ede18ee1682b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://74d6f26de36eecd83615f1e4f61bb111775073dff67ca10a94a65f436569de13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\"
:\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2hhbv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4f7fx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.199561 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://683715d6e8b3e721c73d3b554b026f1ad2796ec64fe7a21192ece89937c0a1ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.219874 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5c77e02-3759-4cf1-9952-ffeffc2bf7cf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e28c7820a6c7a758468366fd037570c94e2e7fa0e1b1383572572e9f5e9c90c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a72c95710bf91e6e945e1c87e81ca7bb1c45736af741186878d43e8d44cec3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de545ea0d1c7a9dc2abbabf5608bd9c24d017b16ac640fac8d033d5b2f9e3e13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00bbcc3fd4dd42221c01f2c794aaac720d860f9214d6473ddca8ca96d5991442\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00bbcc3fd4dd42221c01f2c794aaac720d860f9214d6473ddca8ca96d5991442\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.235088 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.235156 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.235217 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.235243 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.235260 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:52Z","lastTransitionTime":"2026-02-03T07:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.237709 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1de60d34c75ab68a42acc92a1244cdeccaa8b758d66704e4c10873103f503902\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.256287 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2fzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7cedfe91-d1c3-4c56-9aac-797ecade9468\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d4f0bd78f46aff839e5e3f84aab51a1734c1968d5d9f306b6175d0c4e21770e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:11:31Z\\\",\\\"message\\\":\\\"2026-02-03T07:10:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_a9d1b301-d7e3-4c96-b77a-0ab1103474ea\\\\n2026-02-03T07:10:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_a9d1b301-d7e3-4c96-b77a-0ab1103474ea to /host/opt/cni/bin/\\\\n2026-02-03T07:10:46Z [verbose] multus-daemon started\\\\n2026-02-03T07:10:46Z [verbose] Readiness Indicator file check\\\\n2026-02-03T07:11:31Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:44Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:11:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w7g6k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2fzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.280129 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b0d14461-efec-4909-82de-2cce585892a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-03T07:11:45Z\\\",\\\"message\\\":\\\"-749d76644c-4f7fx\\\\nI0203 07:11:45.163816 6827 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI0203 07:11:45.165974 6827 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI0203 07:11:45.165989 6827 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI0203 07:11:45.166000 6827 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI0203 07:11:45.166009 6827 default_network_controller.go:776] Recording success event on pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI0203 07:11:45.163835 6827 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nI0203 07:11:45.166028 6827 obj_retry.go:386] Retry successful for *v1.Pod openshift-etcd/etcd-crc after 0 failed attempt(s)\\\\nI0203 07:11:45.166038 6827 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nF0203 07:11:45.163850 6827 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:11:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2sfqf_openshift-ovn-kubernetes(b0d14461-efec-4909-82de-2cce585892a4)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qtfmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2sfqf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.297124 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52001e35-dcef-46ba-8efc-b6e10ca5369a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b121502cbbc5d6939bae0b53cdad9d1bc3cfbc0e01409cb3126b30fd2cc2b60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3f8941fd56ae24ecbaf6835dbb868af8461255c5da992ee342ef44461502108\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://159ed4bd3b5a484ab337deee2490471b97c2a3631afdea49b5ec96152e99e674\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.316249 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee6cd805-5c9a-49ab-a83e-3bd1437838f0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://200b361a7b1d3ef0a5d05fba630cfe0727fbf9fb36199fe812935a6c43952335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-03T07:10:43Z\\\",\\\"message\\\":\\\":10:43.201197 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0203 07:10:43.201275 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0203 07:10:43.204304 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204353 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204382 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0203 07:10:43.204365 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0203 07:10:43.204484 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0203 07:10:43.204491 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0203 07:10:43.204527 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0203 07:10:43.204537 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0203 07:10:43.204624 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\"\\\\nI0203 07:10:43.204685 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3606329456/tls.crt::/tmp/serving-cert-3606329456/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1770102627\\\\\\\\\\\\\\\" (2026-02-03 07:10:26 +0000 UTC to 2026-03-05 07:10:27 +0000 UTC (now=2026-02-03 07:10:43.204645643 +0000 UTC))\\\\\\\"\\\\nF0203 07:10:43.204777 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:11:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.333522 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.338141 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.338188 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.338200 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.338222 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.338238 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:52Z","lastTransitionTime":"2026-02-03T07:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.346693 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6thl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851add34-7566-4ed5-b70a-c7935eb26e4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-djvg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6thl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.374853 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0e0b9c8a-9915-45ef-adfe-8a3f94b838f3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed87f447ce590518e6c11016afed10c275ec305ce2f1569f049fa9e72e314389\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c81f8040a3903c93c6a8e700733e6d306d4d057145562bca18df93f84e05c130\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53af1b2767b38d2af1ddc24c344dda295c79bdf418f155137756679ba689cd47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5281fb425defd1d58d168728e9d0c9c3e28473
3bf99c8657b71c3d70ad7faad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b91ee37b2b4a9d611dce015828570fb8b69da2b4cdbfb14452f58304390de9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00354562c08cbf28cc0aeeea5857f0a3c6720c61dd9b54d1e1033f590cea004c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00354562c08cbf28cc0aeeea5857f0a3c6720c61dd9b54d1e1033f590cea004c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2764c5a5c9edde2a19bde6f6b63e264f88e124ee3ad739a03fcbaa60ad5516d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2764c5a5c9edde2a19bde6f6b63e264f88e124ee3ad739a03fcbaa60ad5516d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://4f1e6bdad08093b55f4f4d6bbd90470546dd1f6579765bf2b7bc31f82fe116cf\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f1e6bdad08093b55f4f4d6bbd90470546dd1f6579765bf2b7bc31f82fe116cf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.392094 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.405882 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnhzd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50972c31-a8a8-4de2-a88e-9b77c33a1b0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d141bea5a6b5fdc2686db825966155250e18ca2caad00a635273d5b776b2b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9k9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnhzd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.422263 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67498414-5132-496e-9638-189f5941ace0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eedf812b44769143ee725a1bab971c6efafcf5d891132ec17d41f2eeb71e37ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-82zhj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-r94bn\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.437314 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nr7n2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b38d3e3-ef31-43ed-88fe-a896d4e73c7d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2035c2f213ff4052835e3d21c88e712ec2c5e38be22ead95fe94d176c2dc88a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zxtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nr7n2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.441915 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.442005 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.442023 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.442046 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.442064 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:52Z","lastTransitionTime":"2026-02-03T07:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.454280 4708 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7181f5bf-e71d-4077-b6cd-f72f5116363f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-03T07:10:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://777f2d524739d804f111dad8fa87947eada3bd79860cc39a14fda6cc1d70f3ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-03T07:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04ceb2c410480eae93a9968759183b1db1300b005b3904807c7b6393f7866ae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://04ceb2c410480eae93a9968759183b1db1300b005b3904807c7b6393f7866ae6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-03T07:10:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-03T07:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-
lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-03T07:10:22Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-03T07:11:52Z is after 2025-08-24T17:21:41Z" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.544617 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.544670 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.544688 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.544712 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.544726 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:52Z","lastTransitionTime":"2026-02-03T07:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.647331 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.648015 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.648042 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.648078 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.648100 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:52Z","lastTransitionTime":"2026-02-03T07:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.751482 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.751531 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.751546 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.751567 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.751580 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:52Z","lastTransitionTime":"2026-02-03T07:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.854664 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.854715 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.854732 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.854756 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.854776 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:52Z","lastTransitionTime":"2026-02-03T07:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.957220 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.957278 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.957300 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.957484 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:52 crc kubenswrapper[4708]: I0203 07:11:52.957509 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:52Z","lastTransitionTime":"2026-02-03T07:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.061116 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.061186 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.061211 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.061241 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.061260 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:53Z","lastTransitionTime":"2026-02-03T07:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.092080 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.092080 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:11:53 crc kubenswrapper[4708]: E0203 07:11:53.092253 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:11:53 crc kubenswrapper[4708]: E0203 07:11:53.092368 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.123534 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 08:22:31.493893642 +0000 UTC Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.164027 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.164081 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.164094 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.164111 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.164123 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:53Z","lastTransitionTime":"2026-02-03T07:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.267204 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.267252 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.267262 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.267278 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.267288 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:53Z","lastTransitionTime":"2026-02-03T07:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.370402 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.370513 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.370533 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.370563 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.370583 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:53Z","lastTransitionTime":"2026-02-03T07:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.473292 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.473357 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.473380 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.473411 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.473435 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:53Z","lastTransitionTime":"2026-02-03T07:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.576771 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.576913 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.576931 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.576953 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.576969 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:53Z","lastTransitionTime":"2026-02-03T07:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.679568 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.679646 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.679669 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.679691 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.679708 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:53Z","lastTransitionTime":"2026-02-03T07:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.783242 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.783323 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.783354 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.783386 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.783408 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:53Z","lastTransitionTime":"2026-02-03T07:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.886404 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.886476 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.886494 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.886518 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.886536 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:53Z","lastTransitionTime":"2026-02-03T07:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.989593 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.989660 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.989682 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.989706 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:53 crc kubenswrapper[4708]: I0203 07:11:53.989725 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:53Z","lastTransitionTime":"2026-02-03T07:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.092903 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.093004 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:11:54 crc kubenswrapper[4708]: E0203 07:11:54.093152 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:11:54 crc kubenswrapper[4708]: E0203 07:11:54.093267 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.093526 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.093661 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.093681 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.093705 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.093721 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:54Z","lastTransitionTime":"2026-02-03T07:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.124614 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 22:46:18.554366368 +0000 UTC Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.197942 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.198054 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.198076 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.198101 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.198118 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:54Z","lastTransitionTime":"2026-02-03T07:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.301493 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.301560 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.301576 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.301599 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.301618 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:54Z","lastTransitionTime":"2026-02-03T07:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.404841 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.404904 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.404965 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.404993 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.405010 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:54Z","lastTransitionTime":"2026-02-03T07:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.508108 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.508200 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.508257 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.508297 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.508354 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:54Z","lastTransitionTime":"2026-02-03T07:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.611501 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.611575 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.611594 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.611623 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.611643 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:54Z","lastTransitionTime":"2026-02-03T07:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.715403 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.715475 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.715500 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.715530 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.715551 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:54Z","lastTransitionTime":"2026-02-03T07:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.818352 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.818396 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.818406 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.818422 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.818433 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:54Z","lastTransitionTime":"2026-02-03T07:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.921152 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.921220 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.921238 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.921260 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:54 crc kubenswrapper[4708]: I0203 07:11:54.921279 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:54Z","lastTransitionTime":"2026-02-03T07:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.024385 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.024458 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.024470 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.024513 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.024528 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:55Z","lastTransitionTime":"2026-02-03T07:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.092641 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.092762 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:11:55 crc kubenswrapper[4708]: E0203 07:11:55.092963 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:11:55 crc kubenswrapper[4708]: E0203 07:11:55.093111 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.125192 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 06:27:57.345078604 +0000 UTC Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.127425 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.127469 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.127480 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.127497 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.127511 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:55Z","lastTransitionTime":"2026-02-03T07:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.230897 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.230942 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.230954 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.230972 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.230984 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:55Z","lastTransitionTime":"2026-02-03T07:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.333904 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.333972 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.333988 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.334011 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.334028 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:55Z","lastTransitionTime":"2026-02-03T07:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.436583 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.436621 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.436630 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.436642 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.436652 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:55Z","lastTransitionTime":"2026-02-03T07:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.539550 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.539612 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.539633 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.539663 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.539685 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:55Z","lastTransitionTime":"2026-02-03T07:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.642585 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.642651 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.642667 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.642690 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.642706 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:55Z","lastTransitionTime":"2026-02-03T07:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.744736 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.744781 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.744828 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.744846 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.744858 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:55Z","lastTransitionTime":"2026-02-03T07:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.847089 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.847123 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.847131 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.847143 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.847151 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:55Z","lastTransitionTime":"2026-02-03T07:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.949895 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.949929 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.949936 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.949948 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:55 crc kubenswrapper[4708]: I0203 07:11:55.949958 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:55Z","lastTransitionTime":"2026-02-03T07:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.053961 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.054074 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.054100 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.054132 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.054154 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:56Z","lastTransitionTime":"2026-02-03T07:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.092832 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.092963 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:11:56 crc kubenswrapper[4708]: E0203 07:11:56.093092 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:11:56 crc kubenswrapper[4708]: E0203 07:11:56.093164 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.126098 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 22:58:53.047630251 +0000 UTC Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.156925 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.157025 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.157046 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.157067 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.157082 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:56Z","lastTransitionTime":"2026-02-03T07:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.260160 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.260197 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.260205 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.260216 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.260225 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:56Z","lastTransitionTime":"2026-02-03T07:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.363469 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.363524 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.363535 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.363552 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.363563 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:56Z","lastTransitionTime":"2026-02-03T07:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.466324 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.466371 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.466384 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.466405 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.466418 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:56Z","lastTransitionTime":"2026-02-03T07:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.568507 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.569565 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.569658 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.569746 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.569856 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:56Z","lastTransitionTime":"2026-02-03T07:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.672857 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.672925 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.672943 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.672968 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.672986 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:56Z","lastTransitionTime":"2026-02-03T07:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.775334 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.775384 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.775398 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.775417 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.775434 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:56Z","lastTransitionTime":"2026-02-03T07:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.879261 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.879305 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.879315 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.879334 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.879345 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:56Z","lastTransitionTime":"2026-02-03T07:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.981910 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.981964 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.981975 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.981992 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:56 crc kubenswrapper[4708]: I0203 07:11:56.982004 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:56Z","lastTransitionTime":"2026-02-03T07:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.084900 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.085217 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.085295 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.085376 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.085440 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:57Z","lastTransitionTime":"2026-02-03T07:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.092405 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.092419 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:11:57 crc kubenswrapper[4708]: E0203 07:11:57.093279 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:11:57 crc kubenswrapper[4708]: E0203 07:11:57.093567 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.126554 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 05:40:45.108370404 +0000 UTC Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.188738 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.188814 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.188832 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.188854 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.188872 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:57Z","lastTransitionTime":"2026-02-03T07:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.290882 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.290929 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.290939 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.290958 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.290970 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:57Z","lastTransitionTime":"2026-02-03T07:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.394388 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.394420 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.394431 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.394444 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.394455 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:57Z","lastTransitionTime":"2026-02-03T07:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.496883 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.496922 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.496940 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.496956 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.496966 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:57Z","lastTransitionTime":"2026-02-03T07:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.600469 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.600533 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.600555 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.600584 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.600606 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:57Z","lastTransitionTime":"2026-02-03T07:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.703598 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.703665 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.703688 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.703715 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.703735 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:57Z","lastTransitionTime":"2026-02-03T07:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.806900 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.806978 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.806999 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.807027 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.807049 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:57Z","lastTransitionTime":"2026-02-03T07:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.909643 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.909701 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.909721 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.909744 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:57 crc kubenswrapper[4708]: I0203 07:11:57.909761 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:57Z","lastTransitionTime":"2026-02-03T07:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.012508 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.012556 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.012573 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.012594 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.012611 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:58Z","lastTransitionTime":"2026-02-03T07:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.092416 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:11:58 crc kubenswrapper[4708]: E0203 07:11:58.092556 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.092417 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:11:58 crc kubenswrapper[4708]: E0203 07:11:58.092930 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.114495 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.114542 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.114556 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.114571 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.114584 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:58Z","lastTransitionTime":"2026-02-03T07:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.127160 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 09:15:00.741897963 +0000 UTC Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.217245 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.217308 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.217325 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.217349 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.217366 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:58Z","lastTransitionTime":"2026-02-03T07:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.320302 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.320356 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.320372 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.320395 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.320412 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:58Z","lastTransitionTime":"2026-02-03T07:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.424561 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.424620 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.424630 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.424844 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.424861 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:58Z","lastTransitionTime":"2026-02-03T07:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.528262 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.528328 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.528344 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.528367 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.528387 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:58Z","lastTransitionTime":"2026-02-03T07:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.631142 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.631180 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.631194 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.631213 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.631229 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:58Z","lastTransitionTime":"2026-02-03T07:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.733470 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.733514 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.733525 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.733542 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.733554 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:58Z","lastTransitionTime":"2026-02-03T07:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.835343 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.835687 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.835826 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.835937 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.836057 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:58Z","lastTransitionTime":"2026-02-03T07:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.939480 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.939520 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.939530 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.939544 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:58 crc kubenswrapper[4708]: I0203 07:11:58.939553 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:58Z","lastTransitionTime":"2026-02-03T07:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.043434 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.043864 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.044139 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.044362 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.044510 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:59Z","lastTransitionTime":"2026-02-03T07:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.092349 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.092415 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:11:59 crc kubenswrapper[4708]: E0203 07:11:59.093111 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:11:59 crc kubenswrapper[4708]: E0203 07:11:59.093119 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.127497 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 05:48:44.492406755 +0000 UTC Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.147553 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.147611 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.147637 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.147666 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.147687 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:59Z","lastTransitionTime":"2026-02-03T07:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.249969 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.250037 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.250061 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.250090 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.250112 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:59Z","lastTransitionTime":"2026-02-03T07:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.352834 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.352902 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.352922 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.352947 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.352966 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:59Z","lastTransitionTime":"2026-02-03T07:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.455768 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.455840 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.455853 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.455871 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.455884 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:59Z","lastTransitionTime":"2026-02-03T07:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.558935 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.558971 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.558983 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.558999 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.559010 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:59Z","lastTransitionTime":"2026-02-03T07:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.661506 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.661583 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.661600 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.661623 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.661639 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:59Z","lastTransitionTime":"2026-02-03T07:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.763865 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.763916 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.763934 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.763955 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.763971 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:59Z","lastTransitionTime":"2026-02-03T07:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.866212 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.866249 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.866257 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.866270 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.866278 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:59Z","lastTransitionTime":"2026-02-03T07:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.969211 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.969260 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.969275 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.969293 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:11:59 crc kubenswrapper[4708]: I0203 07:11:59.969305 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:11:59Z","lastTransitionTime":"2026-02-03T07:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.071962 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.072024 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.072040 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.072062 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.072081 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:12:00Z","lastTransitionTime":"2026-02-03T07:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.092493 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.092516 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 03 07:12:00 crc kubenswrapper[4708]: E0203 07:12:00.092643 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
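The condition={...} payload in each setters.go:603 line is the node's Ready condition serialized as JSON. A self-contained Go stand-in that reproduces the logged shape (the struct here is a simplification whose field names are chosen to match the logged keys; in the real kubelet this is v1.NodeCondition from k8s.io/api/core/v1):

package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// nodeCondition mirrors the JSON keys seen in the "Node became not
// ready" log lines; it is a stand-in, not the upstream type.
type nodeCondition struct {
	Type               string    `json:"type"`
	Status             string    `json:"status"`
	LastHeartbeatTime  time.Time `json:"lastHeartbeatTime"`
	LastTransitionTime time.Time `json:"lastTransitionTime"`
	Reason             string    `json:"reason"`
	Message            string    `json:"message"`
}

func main() {
	ts := time.Date(2026, 2, 3, 7, 11, 59, 0, time.UTC)
	b, _ := json.Marshal(nodeCondition{
		Type:               "Ready",
		Status:             "False",
		LastHeartbeatTime:  ts,
		LastTransitionTime: ts,
		Reason:             "KubeletNotReady",
		Message:            "container runtime network not ready: ...", // full message elided
	})
	fmt.Println(string(b))
}

Running it prints the same {"type":"Ready","status":"False",...} structure seen above, with the timestamps in RFC 3339 form because time.Time marshals that way by default.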
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:12:00 crc kubenswrapper[4708]: E0203 07:12:00.092724 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.128522 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 15:45:18.872772867 +0000 UTC Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.175774 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.175874 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.175886 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.175903 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.175915 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:12:00Z","lastTransitionTime":"2026-02-03T07:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.277771 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.277862 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.277880 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.277897 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.277928 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:12:00Z","lastTransitionTime":"2026-02-03T07:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.380736 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.380827 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.380846 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.380869 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.380884 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:12:00Z","lastTransitionTime":"2026-02-03T07:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.483626 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.483665 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.483674 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.483708 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.483719 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:12:00Z","lastTransitionTime":"2026-02-03T07:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.585477 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.585526 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.585542 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.585564 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.585581 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:12:00Z","lastTransitionTime":"2026-02-03T07:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.688610 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.689044 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.689154 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.689247 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.689346 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:12:00Z","lastTransitionTime":"2026-02-03T07:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.792110 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.792424 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.792535 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.792648 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.792744 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:12:00Z","lastTransitionTime":"2026-02-03T07:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.897420 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.897463 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.897475 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.897496 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:12:00 crc kubenswrapper[4708]: I0203 07:12:00.897515 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:12:00Z","lastTransitionTime":"2026-02-03T07:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.000912 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.000958 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.000967 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.000982 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.000990 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:12:01Z","lastTransitionTime":"2026-02-03T07:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.092624 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.092720 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:12:01 crc kubenswrapper[4708]: E0203 07:12:01.092896 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:12:01 crc kubenswrapper[4708]: E0203 07:12:01.093111 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
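Every "network is not ready" line in this stretch points at the same missing artifact: no CNI configuration file under /etc/kubernetes/cni/net.d/. A rough Go sketch of such a readiness probe, assuming the check amounts to looking for a .conf, .conflist, or .json file in that directory (the real lookup lives in the container runtime and libcni, not in this exact form):

package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

// networkReady reports whether any CNI config file exists in dir.
// This is a stand-in for the check behind "no CNI configuration
// file in /etc/kubernetes/cni/net.d/", not the runtime's code.
func networkReady(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		// Extensions CNI config loaders conventionally scan for.
		switch strings.ToLower(filepath.Ext(e.Name())) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ready, err := networkReady("/etc/kubernetes/cni/net.d")
	fmt.Println("NetworkReady:", ready, "err:", err)
}

Until the network plugin (here, OVN-Kubernetes) writes its config into that directory, the runtime keeps answering NetworkReady=false and the kubelet keeps republishing the NotReady condition seen above.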
pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.095041 4708 scope.go:117] "RemoveContainer" containerID="cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1" Feb 03 07:12:01 crc kubenswrapper[4708]: E0203 07:12:01.095527 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2sfqf_openshift-ovn-kubernetes(b0d14461-efec-4909-82de-2cce585892a4)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" podUID="b0d14461-efec-4909-82de-2cce585892a4" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.104253 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.104311 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.104329 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.104353 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.104372 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:12:01Z","lastTransitionTime":"2026-02-03T07:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.129655 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 19:02:09.542229336 +0000 UTC Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.207231 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.207269 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.207279 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.207293 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.207305 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:12:01Z","lastTransitionTime":"2026-02-03T07:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.309780 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.309843 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.309854 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.309871 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.309882 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:12:01Z","lastTransitionTime":"2026-02-03T07:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.411730 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.411773 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.411784 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.411824 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.411836 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:12:01Z","lastTransitionTime":"2026-02-03T07:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.514238 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.514519 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.514627 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.514731 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.514848 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:12:01Z","lastTransitionTime":"2026-02-03T07:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.569355 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.569409 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.569421 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.569439 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.569451 4708 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-03T07:12:01Z","lastTransitionTime":"2026-02-03T07:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.626481 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-9h6xx"] Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.627179 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9h6xx" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.630520 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.630716 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.630890 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.636557 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.645236 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=78.645216576 podStartE2EDuration="1m18.645216576s" podCreationTimestamp="2026-02-03 07:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:01.645187695 +0000 UTC m=+100.627134502" watchObservedRunningTime="2026-02-03 07:12:01.645216576 +0000 UTC m=+100.627163383" Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.654885 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=51.654866875 podStartE2EDuration="51.654866875s" podCreationTimestamp="2026-02-03 07:11:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:01.654834524 +0000 UTC m=+100.636781331" watchObservedRunningTime="2026-02-03 07:12:01.654866875 +0000 UTC m=+100.636813672" 
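The "back-off 40s restarting failed container=ovnkube-controller" error above is the kubelet's per-container crash-loop backoff at work. A sketch assuming the commonly cited parameters of a 10s initial delay, doubled per consecutive crash and capped at 5m (treat the exact constants as an assumption, not something read out of this log); on those numbers, 40s corresponds to the third consecutive crash:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Assumed crash-loop restart backoff: 10s initial, doubling per
	// consecutive failure, capped at 5m.
	backoff, limit := 10*time.Second, 5*time.Minute
	for crash := 1; crash <= 7; crash++ {
		fmt.Printf("crash %d: back-off %s\n", crash, backoff)
		if backoff *= 2; backoff > limit {
			backoff = limit
		}
	}
}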
Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.707602 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a1839e63-ec0e-4194-9796-40d15c13cc0f-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-9h6xx\" (UID: \"a1839e63-ec0e-4194-9796-40d15c13cc0f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9h6xx"
Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.707731 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a1839e63-ec0e-4194-9796-40d15c13cc0f-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-9h6xx\" (UID: \"a1839e63-ec0e-4194-9796-40d15c13cc0f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9h6xx"
Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.707787 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a1839e63-ec0e-4194-9796-40d15c13cc0f-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-9h6xx\" (UID: \"a1839e63-ec0e-4194-9796-40d15c13cc0f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9h6xx"
Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.707842 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a1839e63-ec0e-4194-9796-40d15c13cc0f-service-ca\") pod \"cluster-version-operator-5c965bbfc6-9h6xx\" (UID: \"a1839e63-ec0e-4194-9796-40d15c13cc0f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9h6xx"
Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.707932 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a1839e63-ec0e-4194-9796-40d15c13cc0f-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-9h6xx\" (UID: \"a1839e63-ec0e-4194-9796-40d15c13cc0f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9h6xx"
Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.708511 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-f2fzr" podStartSLOduration=77.708492449 podStartE2EDuration="1m17.708492449s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:01.677311574 +0000 UTC m=+100.659258381" watchObservedRunningTime="2026-02-03 07:12:01.708492449 +0000 UTC m=+100.690439256"
Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.751963 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=20.75193789 podStartE2EDuration="20.75193789s" podCreationTimestamp="2026-02-03 07:11:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:01.750764979 +0000 UTC m=+100.732711786" watchObservedRunningTime="2026-02-03 07:12:01.75193789 +0000 UTC m=+100.733884727"
Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.768222 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=78.76820284 podStartE2EDuration="1m18.76820284s" podCreationTimestamp="2026-02-03 07:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:01.768137437 +0000 UTC m=+100.750084264" watchObservedRunningTime="2026-02-03 07:12:01.76820284 +0000 UTC m=+100.750149647"
Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.809350 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a1839e63-ec0e-4194-9796-40d15c13cc0f-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-9h6xx\" (UID: \"a1839e63-ec0e-4194-9796-40d15c13cc0f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9h6xx"
Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.809715 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a1839e63-ec0e-4194-9796-40d15c13cc0f-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-9h6xx\" (UID: \"a1839e63-ec0e-4194-9796-40d15c13cc0f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9h6xx"
Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.809898 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a1839e63-ec0e-4194-9796-40d15c13cc0f-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-9h6xx\" (UID: \"a1839e63-ec0e-4194-9796-40d15c13cc0f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9h6xx"
Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.810012 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a1839e63-ec0e-4194-9796-40d15c13cc0f-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-9h6xx\" (UID: \"a1839e63-ec0e-4194-9796-40d15c13cc0f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9h6xx"
Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.809465 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a1839e63-ec0e-4194-9796-40d15c13cc0f-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-9h6xx\" (UID: \"a1839e63-ec0e-4194-9796-40d15c13cc0f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9h6xx"
Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.810248 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a1839e63-ec0e-4194-9796-40d15c13cc0f-service-ca\") pod \"cluster-version-operator-5c965bbfc6-9h6xx\" (UID: \"a1839e63-ec0e-4194-9796-40d15c13cc0f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9h6xx"
Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.810462 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a1839e63-ec0e-4194-9796-40d15c13cc0f-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-9h6xx\" (UID: \"a1839e63-ec0e-4194-9796-40d15c13cc0f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9h6xx"
Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.812038 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a1839e63-ec0e-4194-9796-40d15c13cc0f-service-ca\") pod \"cluster-version-operator-5c965bbfc6-9h6xx\" (UID: \"a1839e63-ec0e-4194-9796-40d15c13cc0f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9h6xx"
Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.816477 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a1839e63-ec0e-4194-9796-40d15c13cc0f-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-9h6xx\" (UID: \"a1839e63-ec0e-4194-9796-40d15c13cc0f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9h6xx"
Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.840953 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=13.840935736 podStartE2EDuration="13.840935736s" podCreationTimestamp="2026-02-03 07:11:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:01.840438154 +0000 UTC m=+100.822384971" watchObservedRunningTime="2026-02-03 07:12:01.840935736 +0000 UTC m=+100.822882553"
Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.850844 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a1839e63-ec0e-4194-9796-40d15c13cc0f-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-9h6xx\" (UID: \"a1839e63-ec0e-4194-9796-40d15c13cc0f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9h6xx"
Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.913750 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-xnhzd" podStartSLOduration=78.913732875 podStartE2EDuration="1m18.913732875s" podCreationTimestamp="2026-02-03 07:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:01.883808603 +0000 UTC m=+100.865755450" watchObservedRunningTime="2026-02-03 07:12:01.913732875 +0000 UTC m=+100.895679682"
Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.913868 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podStartSLOduration=77.913862599 podStartE2EDuration="1m17.913862599s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:01.904147248 +0000 UTC m=+100.886094075" watchObservedRunningTime="2026-02-03 07:12:01.913862599 +0000 UTC m=+100.895809416"
Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.918690 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-nr7n2" podStartSLOduration=77.918673123 podStartE2EDuration="1m17.918673123s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:01.91817001 +0000 UTC m=+100.900116817" watchObservedRunningTime="2026-02-03 07:12:01.918673123 +0000 UTC m=+100.900619940"
Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.947044 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9h6xx"
Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.983402 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-fj5fc" podStartSLOduration=77.983385013 podStartE2EDuration="1m17.983385013s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:01.982821149 +0000 UTC m=+100.964767956" watchObservedRunningTime="2026-02-03 07:12:01.983385013 +0000 UTC m=+100.965331820"
Feb 03 07:12:01 crc kubenswrapper[4708]: I0203 07:12:01.994725 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4f7fx" podStartSLOduration=77.994707275 podStartE2EDuration="1m17.994707275s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:01.994391637 +0000 UTC m=+100.976338454" watchObservedRunningTime="2026-02-03 07:12:01.994707275 +0000 UTC m=+100.976654102"
Feb 03 07:12:02 crc kubenswrapper[4708]: I0203 07:12:02.093134 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 03 07:12:02 crc kubenswrapper[4708]: E0203 07:12:02.093879 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 03 07:12:02 crc kubenswrapper[4708]: I0203 07:12:02.094081 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 03 07:12:02 crc kubenswrapper[4708]: E0203 07:12:02.094170 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:12:02 crc kubenswrapper[4708]: I0203 07:12:02.131374 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 17:52:49.915590373 +0000 UTC Feb 03 07:12:02 crc kubenswrapper[4708]: I0203 07:12:02.131450 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates Feb 03 07:12:02 crc kubenswrapper[4708]: I0203 07:12:02.139849 4708 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Feb 03 07:12:02 crc kubenswrapper[4708]: I0203 07:12:02.665756 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9h6xx" event={"ID":"a1839e63-ec0e-4194-9796-40d15c13cc0f","Type":"ContainerStarted","Data":"0486f2fe066d536584efcd0e756fa5e4f71cd00e3cebb48b57575c25d7307c1d"} Feb 03 07:12:02 crc kubenswrapper[4708]: I0203 07:12:02.665834 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9h6xx" event={"ID":"a1839e63-ec0e-4194-9796-40d15c13cc0f","Type":"ContainerStarted","Data":"2e815ca76695742c614d9511c264797e76754bff39cb5dde1443ff279192951c"} Feb 03 07:12:02 crc kubenswrapper[4708]: I0203 07:12:02.678761 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9h6xx" podStartSLOduration=78.678739969 podStartE2EDuration="1m18.678739969s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:02.678285838 +0000 UTC m=+101.660232645" watchObservedRunningTime="2026-02-03 07:12:02.678739969 +0000 UTC m=+101.660686806" Feb 03 07:12:03 crc kubenswrapper[4708]: I0203 07:12:03.092081 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:12:03 crc kubenswrapper[4708]: E0203 07:12:03.092271 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:12:03 crc kubenswrapper[4708]: I0203 07:12:03.092090 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:12:03 crc kubenswrapper[4708]: E0203 07:12:03.092483 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:12:03 crc kubenswrapper[4708]: I0203 07:12:03.224475 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/851add34-7566-4ed5-b70a-c7935eb26e4f-metrics-certs\") pod \"network-metrics-daemon-6thl9\" (UID: \"851add34-7566-4ed5-b70a-c7935eb26e4f\") " pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:12:03 crc kubenswrapper[4708]: E0203 07:12:03.224656 4708 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 03 07:12:03 crc kubenswrapper[4708]: E0203 07:12:03.224756 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/851add34-7566-4ed5-b70a-c7935eb26e4f-metrics-certs podName:851add34-7566-4ed5-b70a-c7935eb26e4f nodeName:}" failed. No retries permitted until 2026-02-03 07:13:07.224728451 +0000 UTC m=+166.206675278 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/851add34-7566-4ed5-b70a-c7935eb26e4f-metrics-certs") pod "network-metrics-daemon-6thl9" (UID: "851add34-7566-4ed5-b70a-c7935eb26e4f") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 03 07:12:04 crc kubenswrapper[4708]: I0203 07:12:04.091972 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:12:04 crc kubenswrapper[4708]: I0203 07:12:04.092036 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:12:04 crc kubenswrapper[4708]: E0203 07:12:04.092147 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:12:04 crc kubenswrapper[4708]: E0203 07:12:04.092260 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:12:05 crc kubenswrapper[4708]: I0203 07:12:05.092852 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:12:05 crc kubenswrapper[4708]: E0203 07:12:05.093230 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:12:05 crc kubenswrapper[4708]: I0203 07:12:05.093127 4708 util.go:30] "No sandbox for pod can be found. 
Feb 03 07:12:05 crc kubenswrapper[4708]: I0203 07:12:05.092900 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 03 07:12:05 crc kubenswrapper[4708]: E0203 07:12:05.093300 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 03 07:12:05 crc kubenswrapper[4708]: E0203 07:12:05.093389 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 03 07:12:06 crc kubenswrapper[4708]: I0203 07:12:06.092968 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 03 07:12:06 crc kubenswrapper[4708]: E0203 07:12:06.093202 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 03 07:12:07 crc kubenswrapper[4708]: I0203 07:12:07.091896 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9"
Feb 03 07:12:07 crc kubenswrapper[4708]: I0203 07:12:07.091950 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 03 07:12:07 crc kubenswrapper[4708]: E0203 07:12:07.091986 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f"
Feb 03 07:12:07 crc kubenswrapper[4708]: I0203 07:12:07.091891 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 03 07:12:07 crc kubenswrapper[4708]: E0203 07:12:07.092167 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 03 07:12:07 crc kubenswrapper[4708]: E0203 07:12:07.092304 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 03 07:12:08 crc kubenswrapper[4708]: I0203 07:12:08.092344 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 03 07:12:08 crc kubenswrapper[4708]: E0203 07:12:08.092469 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 03 07:12:09 crc kubenswrapper[4708]: I0203 07:12:09.092304 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 03 07:12:09 crc kubenswrapper[4708]: E0203 07:12:09.092433 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 03 07:12:09 crc kubenswrapper[4708]: I0203 07:12:09.092486 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9"
Feb 03 07:12:09 crc kubenswrapper[4708]: E0203 07:12:09.092568 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f"
Feb 03 07:12:09 crc kubenswrapper[4708]: I0203 07:12:09.092589 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 03 07:12:09 crc kubenswrapper[4708]: E0203 07:12:09.092876 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 03 07:12:10 crc kubenswrapper[4708]: I0203 07:12:10.092929 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:12:10 crc kubenswrapper[4708]: E0203 07:12:10.093412 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:12:11 crc kubenswrapper[4708]: I0203 07:12:11.091776 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:12:11 crc kubenswrapper[4708]: I0203 07:12:11.091862 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:12:11 crc kubenswrapper[4708]: I0203 07:12:11.091880 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:12:11 crc kubenswrapper[4708]: E0203 07:12:11.091917 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:12:11 crc kubenswrapper[4708]: E0203 07:12:11.092041 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:12:11 crc kubenswrapper[4708]: E0203 07:12:11.092130 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:12:12 crc kubenswrapper[4708]: I0203 07:12:12.093188 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:12:12 crc kubenswrapper[4708]: E0203 07:12:12.094048 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:12:12 crc kubenswrapper[4708]: I0203 07:12:12.094318 4708 scope.go:117] "RemoveContainer" containerID="cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1" Feb 03 07:12:12 crc kubenswrapper[4708]: E0203 07:12:12.094514 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2sfqf_openshift-ovn-kubernetes(b0d14461-efec-4909-82de-2cce585892a4)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" podUID="b0d14461-efec-4909-82de-2cce585892a4" Feb 03 07:12:13 crc kubenswrapper[4708]: I0203 07:12:13.092179 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:12:13 crc kubenswrapper[4708]: I0203 07:12:13.092219 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:12:13 crc kubenswrapper[4708]: I0203 07:12:13.092257 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:12:13 crc kubenswrapper[4708]: E0203 07:12:13.092287 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:12:13 crc kubenswrapper[4708]: E0203 07:12:13.092369 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:12:13 crc kubenswrapper[4708]: E0203 07:12:13.092461 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:12:14 crc kubenswrapper[4708]: I0203 07:12:14.092826 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:12:14 crc kubenswrapper[4708]: E0203 07:12:14.093048 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:12:15 crc kubenswrapper[4708]: I0203 07:12:15.092905 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:12:15 crc kubenswrapper[4708]: I0203 07:12:15.092922 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:12:15 crc kubenswrapper[4708]: I0203 07:12:15.092946 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:12:15 crc kubenswrapper[4708]: E0203 07:12:15.093024 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:12:15 crc kubenswrapper[4708]: E0203 07:12:15.093149 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:12:15 crc kubenswrapper[4708]: E0203 07:12:15.093201 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:12:16 crc kubenswrapper[4708]: I0203 07:12:16.092554 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:12:16 crc kubenswrapper[4708]: E0203 07:12:16.092748 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:12:17 crc kubenswrapper[4708]: I0203 07:12:17.092072 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:12:17 crc kubenswrapper[4708]: I0203 07:12:17.092160 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:12:17 crc kubenswrapper[4708]: I0203 07:12:17.092178 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:12:17 crc kubenswrapper[4708]: E0203 07:12:17.092310 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:12:17 crc kubenswrapper[4708]: E0203 07:12:17.092462 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:12:17 crc kubenswrapper[4708]: E0203 07:12:17.092584 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:12:18 crc kubenswrapper[4708]: I0203 07:12:18.092082 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:12:18 crc kubenswrapper[4708]: E0203 07:12:18.092523 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:12:18 crc kubenswrapper[4708]: I0203 07:12:18.725025 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-f2fzr_7cedfe91-d1c3-4c56-9aac-797ecade9468/kube-multus/1.log" Feb 03 07:12:18 crc kubenswrapper[4708]: I0203 07:12:18.725938 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-f2fzr_7cedfe91-d1c3-4c56-9aac-797ecade9468/kube-multus/0.log" Feb 03 07:12:18 crc kubenswrapper[4708]: I0203 07:12:18.726002 4708 generic.go:334] "Generic (PLEG): container finished" podID="7cedfe91-d1c3-4c56-9aac-797ecade9468" containerID="0d4f0bd78f46aff839e5e3f84aab51a1734c1968d5d9f306b6175d0c4e21770e" exitCode=1 Feb 03 07:12:18 crc kubenswrapper[4708]: I0203 07:12:18.726051 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-f2fzr" event={"ID":"7cedfe91-d1c3-4c56-9aac-797ecade9468","Type":"ContainerDied","Data":"0d4f0bd78f46aff839e5e3f84aab51a1734c1968d5d9f306b6175d0c4e21770e"} Feb 03 07:12:18 crc kubenswrapper[4708]: I0203 07:12:18.726099 4708 scope.go:117] "RemoveContainer" containerID="5cf8782a88dadf91f6b0b3bf5b8b49419075336ffb741fc33a8eee3892ba62bd" Feb 03 07:12:18 crc kubenswrapper[4708]: I0203 07:12:18.726681 4708 scope.go:117] "RemoveContainer" containerID="0d4f0bd78f46aff839e5e3f84aab51a1734c1968d5d9f306b6175d0c4e21770e" Feb 03 07:12:18 crc kubenswrapper[4708]: E0203 07:12:18.727029 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-f2fzr_openshift-multus(7cedfe91-d1c3-4c56-9aac-797ecade9468)\"" pod="openshift-multus/multus-f2fzr" podUID="7cedfe91-d1c3-4c56-9aac-797ecade9468" Feb 03 07:12:19 crc kubenswrapper[4708]: I0203 07:12:19.092986 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:12:19 crc kubenswrapper[4708]: I0203 07:12:19.093136 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:12:19 crc kubenswrapper[4708]: I0203 07:12:19.092986 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:12:19 crc kubenswrapper[4708]: E0203 07:12:19.093210 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:12:19 crc kubenswrapper[4708]: E0203 07:12:19.093395 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:12:19 crc kubenswrapper[4708]: E0203 07:12:19.093605 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:12:19 crc kubenswrapper[4708]: I0203 07:12:19.730541 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-f2fzr_7cedfe91-d1c3-4c56-9aac-797ecade9468/kube-multus/1.log" Feb 03 07:12:20 crc kubenswrapper[4708]: I0203 07:12:20.092690 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:12:20 crc kubenswrapper[4708]: E0203 07:12:20.092872 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:12:21 crc kubenswrapper[4708]: I0203 07:12:21.092651 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:12:21 crc kubenswrapper[4708]: I0203 07:12:21.092706 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:12:21 crc kubenswrapper[4708]: I0203 07:12:21.092859 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:12:21 crc kubenswrapper[4708]: E0203 07:12:21.092858 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:12:21 crc kubenswrapper[4708]: E0203 07:12:21.093272 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:12:21 crc kubenswrapper[4708]: E0203 07:12:21.093406 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:12:22 crc kubenswrapper[4708]: I0203 07:12:22.092070 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:12:22 crc kubenswrapper[4708]: E0203 07:12:22.093383 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:12:22 crc kubenswrapper[4708]: E0203 07:12:22.111869 4708 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Feb 03 07:12:22 crc kubenswrapper[4708]: E0203 07:12:22.200537 4708 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 03 07:12:23 crc kubenswrapper[4708]: I0203 07:12:23.092956 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:12:23 crc kubenswrapper[4708]: I0203 07:12:23.092957 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:12:23 crc kubenswrapper[4708]: I0203 07:12:23.093001 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:12:23 crc kubenswrapper[4708]: E0203 07:12:23.093673 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:12:23 crc kubenswrapper[4708]: E0203 07:12:23.094017 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:12:23 crc kubenswrapper[4708]: E0203 07:12:23.094048 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:12:23 crc kubenswrapper[4708]: I0203 07:12:23.094347 4708 scope.go:117] "RemoveContainer" containerID="cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1" Feb 03 07:12:23 crc kubenswrapper[4708]: E0203 07:12:23.094600 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2sfqf_openshift-ovn-kubernetes(b0d14461-efec-4909-82de-2cce585892a4)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" podUID="b0d14461-efec-4909-82de-2cce585892a4" Feb 03 07:12:24 crc kubenswrapper[4708]: I0203 07:12:24.092845 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:12:24 crc kubenswrapper[4708]: E0203 07:12:24.093048 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:12:25 crc kubenswrapper[4708]: I0203 07:12:25.092406 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:12:25 crc kubenswrapper[4708]: I0203 07:12:25.092452 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:12:25 crc kubenswrapper[4708]: E0203 07:12:25.092604 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:12:25 crc kubenswrapper[4708]: I0203 07:12:25.092559 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:12:25 crc kubenswrapper[4708]: E0203 07:12:25.092692 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:12:25 crc kubenswrapper[4708]: E0203 07:12:25.092884 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:12:26 crc kubenswrapper[4708]: I0203 07:12:26.092102 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:12:26 crc kubenswrapper[4708]: E0203 07:12:26.092591 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:12:27 crc kubenswrapper[4708]: I0203 07:12:27.091899 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:12:27 crc kubenswrapper[4708]: I0203 07:12:27.091993 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:12:27 crc kubenswrapper[4708]: E0203 07:12:27.092147 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:12:27 crc kubenswrapper[4708]: I0203 07:12:27.092208 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:12:27 crc kubenswrapper[4708]: E0203 07:12:27.092277 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:12:27 crc kubenswrapper[4708]: E0203 07:12:27.093030 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:12:27 crc kubenswrapper[4708]: E0203 07:12:27.202454 4708 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 03 07:12:28 crc kubenswrapper[4708]: I0203 07:12:28.092474 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:12:28 crc kubenswrapper[4708]: E0203 07:12:28.092672 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:12:29 crc kubenswrapper[4708]: I0203 07:12:29.091996 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:12:29 crc kubenswrapper[4708]: I0203 07:12:29.092078 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:12:29 crc kubenswrapper[4708]: I0203 07:12:29.092236 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:12:29 crc kubenswrapper[4708]: E0203 07:12:29.092227 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:12:29 crc kubenswrapper[4708]: E0203 07:12:29.092449 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:12:29 crc kubenswrapper[4708]: E0203 07:12:29.092610 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:12:30 crc kubenswrapper[4708]: I0203 07:12:30.092446 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:12:30 crc kubenswrapper[4708]: E0203 07:12:30.093514 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:12:31 crc kubenswrapper[4708]: I0203 07:12:31.092894 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:12:31 crc kubenswrapper[4708]: E0203 07:12:31.093098 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:12:31 crc kubenswrapper[4708]: I0203 07:12:31.092917 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:12:31 crc kubenswrapper[4708]: I0203 07:12:31.093381 4708 scope.go:117] "RemoveContainer" containerID="0d4f0bd78f46aff839e5e3f84aab51a1734c1968d5d9f306b6175d0c4e21770e" Feb 03 07:12:31 crc kubenswrapper[4708]: I0203 07:12:31.093414 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:12:31 crc kubenswrapper[4708]: E0203 07:12:31.093408 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:12:31 crc kubenswrapper[4708]: E0203 07:12:31.093528 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:12:31 crc kubenswrapper[4708]: I0203 07:12:31.773531 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-f2fzr_7cedfe91-d1c3-4c56-9aac-797ecade9468/kube-multus/1.log" Feb 03 07:12:31 crc kubenswrapper[4708]: I0203 07:12:31.773581 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-f2fzr" event={"ID":"7cedfe91-d1c3-4c56-9aac-797ecade9468","Type":"ContainerStarted","Data":"998f20f20c9b0feb9812819e21b6baa21d47ead8601fee9887b8830380f6a31b"} Feb 03 07:12:32 crc kubenswrapper[4708]: I0203 07:12:32.092044 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:12:32 crc kubenswrapper[4708]: E0203 07:12:32.093848 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:12:32 crc kubenswrapper[4708]: E0203 07:12:32.203506 4708 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 03 07:12:33 crc kubenswrapper[4708]: I0203 07:12:33.092085 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:12:33 crc kubenswrapper[4708]: E0203 07:12:33.092247 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:12:33 crc kubenswrapper[4708]: I0203 07:12:33.092350 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:12:33 crc kubenswrapper[4708]: I0203 07:12:33.092422 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:12:33 crc kubenswrapper[4708]: E0203 07:12:33.092484 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:12:33 crc kubenswrapper[4708]: E0203 07:12:33.092658 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:12:34 crc kubenswrapper[4708]: I0203 07:12:34.092273 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:12:34 crc kubenswrapper[4708]: E0203 07:12:34.092521 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:12:34 crc kubenswrapper[4708]: I0203 07:12:34.093680 4708 scope.go:117] "RemoveContainer" containerID="cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1" Feb 03 07:12:34 crc kubenswrapper[4708]: I0203 07:12:34.787452 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2sfqf_b0d14461-efec-4909-82de-2cce585892a4/ovnkube-controller/3.log" Feb 03 07:12:34 crc kubenswrapper[4708]: I0203 07:12:34.790305 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" event={"ID":"b0d14461-efec-4909-82de-2cce585892a4","Type":"ContainerStarted","Data":"fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99"} Feb 03 07:12:34 crc kubenswrapper[4708]: I0203 07:12:34.790728 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:12:34 crc kubenswrapper[4708]: I0203 07:12:34.826925 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" podStartSLOduration=110.826882596 podStartE2EDuration="1m50.826882596s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:34.826189649 +0000 UTC m=+133.808136486" watchObservedRunningTime="2026-02-03 07:12:34.826882596 +0000 UTC m=+133.808829403" Feb 03 07:12:34 crc kubenswrapper[4708]: I0203 07:12:34.915976 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-6thl9"] Feb 03 07:12:34 crc kubenswrapper[4708]: I0203 07:12:34.916123 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:12:34 crc kubenswrapper[4708]: E0203 07:12:34.916283 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:12:35 crc kubenswrapper[4708]: I0203 07:12:35.092319 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:12:35 crc kubenswrapper[4708]: I0203 07:12:35.092366 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:12:35 crc kubenswrapper[4708]: E0203 07:12:35.092444 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:12:35 crc kubenswrapper[4708]: E0203 07:12:35.092538 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:12:36 crc kubenswrapper[4708]: I0203 07:12:36.092855 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:12:36 crc kubenswrapper[4708]: I0203 07:12:36.092938 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:12:36 crc kubenswrapper[4708]: E0203 07:12:36.093012 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6thl9" podUID="851add34-7566-4ed5-b70a-c7935eb26e4f" Feb 03 07:12:36 crc kubenswrapper[4708]: E0203 07:12:36.093106 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 03 07:12:37 crc kubenswrapper[4708]: I0203 07:12:37.091844 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:12:37 crc kubenswrapper[4708]: I0203 07:12:37.091932 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:12:37 crc kubenswrapper[4708]: E0203 07:12:37.092132 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 03 07:12:37 crc kubenswrapper[4708]: E0203 07:12:37.091966 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 03 07:12:38 crc kubenswrapper[4708]: I0203 07:12:38.092833 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:12:38 crc kubenswrapper[4708]: I0203 07:12:38.093070 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:12:38 crc kubenswrapper[4708]: I0203 07:12:38.099443 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Feb 03 07:12:38 crc kubenswrapper[4708]: I0203 07:12:38.099473 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Feb 03 07:12:38 crc kubenswrapper[4708]: I0203 07:12:38.099528 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Feb 03 07:12:38 crc kubenswrapper[4708]: I0203 07:12:38.102110 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Feb 03 07:12:39 crc kubenswrapper[4708]: I0203 07:12:39.092139 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:12:39 crc kubenswrapper[4708]: I0203 07:12:39.092154 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:12:39 crc kubenswrapper[4708]: I0203 07:12:39.095080 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Feb 03 07:12:39 crc kubenswrapper[4708]: I0203 07:12:39.099271 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.288178 4708 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.335239 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xpdpr"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.335713 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-xpdpr" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.337039 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-vzmxn"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.337590 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vzmxn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.339049 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.339370 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.339721 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.339943 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.340095 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-tdn7n"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.340231 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.340626 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-tdn7n" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.343911 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.346105 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.346437 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.346710 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.346815 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.346731 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.346737 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.347612 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.347719 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.349602 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.349691 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 03 07:12:42 
crc kubenswrapper[4708]: I0203 07:12:42.353889 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-qftz5"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.357269 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.357998 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-ds4j6"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.358188 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.358313 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-ds4j6" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.362500 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-9fjv2"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.364212 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.364429 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-9fjv2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.367861 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.368048 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.368183 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.368428 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.368533 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.368667 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.368966 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.369269 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.369296 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.369594 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.369917 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.369984 4708 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.372149 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.372958 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lfvrw"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.373481 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.376015 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcrbn"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.376424 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcrbn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.377304 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.379109 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.382134 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-mts5h"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.382863 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-mts5h" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.385929 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.391116 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-j5cvd"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.391563 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.391977 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mwrf4"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.392421 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mwrf4" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.392786 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-j5cvd" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.394236 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.395845 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.395967 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.396065 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.396177 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.396281 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.396492 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.396913 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.397005 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.397096 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.397193 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.397287 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.397372 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.398706 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-c2mlc"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.399321 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-c2mlc" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.399910 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-zd8kn"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.400490 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-zd8kn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.430598 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.430965 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.431569 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.431857 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.432160 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.432984 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.433609 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.433999 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.440820 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.441218 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.442665 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xpdpr"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.442696 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ghh7f"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.444229 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ghh7f" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.444386 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-9fjv2"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.446575 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.446838 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.447238 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.447265 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.447457 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.447578 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.448043 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.448194 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.448272 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.448311 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.448375 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.448448 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.448479 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.448695 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.448532 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.448559 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.448854 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.448612 4708 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-console"/"kube-root-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.448654 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.449111 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.449213 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.451449 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-748cf"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.452064 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vtsww"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.452520 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vtsww" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.452599 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-748cf" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.454748 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.454908 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.455031 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.455203 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.455380 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.455589 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.456002 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.456725 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcrbn"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.457959 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lfvrw"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.460036 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-qftz5"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.461053 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Feb 03 07:12:42 crc kubenswrapper[4708]: 
I0203 07:12:42.461284 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.462742 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.462871 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.463139 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.463144 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.464872 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clxjq\" (UniqueName: \"kubernetes.io/projected/abf360a5-e982-4b3e-a814-511d57e9073f-kube-api-access-clxjq\") pod \"dns-operator-744455d44c-mts5h\" (UID: \"abf360a5-e982-4b3e-a814-511d57e9073f\") " pod="openshift-dns-operator/dns-operator-744455d44c-mts5h" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.464930 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c4d3118a-28d4-403b-95c3-f2a11c14846d-trusted-ca-bundle\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.464977 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b20ffaa1-a31e-451e-89a3-5e36287a0c5b-config\") pod \"authentication-operator-69f744f599-ds4j6\" (UID: \"b20ffaa1-a31e-451e-89a3-5e36287a0c5b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ds4j6" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.465027 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkldd\" (UniqueName: \"kubernetes.io/projected/b20ffaa1-a31e-451e-89a3-5e36287a0c5b-kube-api-access-qkldd\") pod \"authentication-operator-69f744f599-ds4j6\" (UID: \"b20ffaa1-a31e-451e-89a3-5e36287a0c5b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ds4j6" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.465082 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/c4d3118a-28d4-403b-95c3-f2a11c14846d-etcd-serving-ca\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.465104 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b20ffaa1-a31e-451e-89a3-5e36287a0c5b-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-ds4j6\" (UID: \"b20ffaa1-a31e-451e-89a3-5e36287a0c5b\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-ds4j6" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.465149 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3bb818a6-b7dd-4e6a-b767-394bca081222-audit-dir\") pod \"apiserver-7bbb656c7d-fvbq2\" (UID: \"3bb818a6-b7dd-4e6a-b767-394bca081222\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.465172 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3bb818a6-b7dd-4e6a-b767-394bca081222-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-fvbq2\" (UID: \"3bb818a6-b7dd-4e6a-b767-394bca081222\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.465227 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sd46g\" (UniqueName: \"kubernetes.io/projected/a34c5792-5895-4d08-9e7e-b3948f5be096-kube-api-access-sd46g\") pod \"machine-api-operator-5694c8668f-9fjv2\" (UID: \"a34c5792-5895-4d08-9e7e-b3948f5be096\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9fjv2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.465270 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a34c5792-5895-4d08-9e7e-b3948f5be096-config\") pod \"machine-api-operator-5694c8668f-9fjv2\" (UID: \"a34c5792-5895-4d08-9e7e-b3948f5be096\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9fjv2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.465353 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c4d3118a-28d4-403b-95c3-f2a11c14846d-serving-cert\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.465447 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.465477 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3bb818a6-b7dd-4e6a-b767-394bca081222-audit-policies\") pod \"apiserver-7bbb656c7d-fvbq2\" (UID: \"3bb818a6-b7dd-4e6a-b767-394bca081222\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.465544 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/c4d3118a-28d4-403b-95c3-f2a11c14846d-node-pullsecrets\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.465624 
4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/c4d3118a-28d4-403b-95c3-f2a11c14846d-encryption-config\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.465655 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b20ffaa1-a31e-451e-89a3-5e36287a0c5b-serving-cert\") pod \"authentication-operator-69f744f599-ds4j6\" (UID: \"b20ffaa1-a31e-451e-89a3-5e36287a0c5b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ds4j6" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.465718 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b20ffaa1-a31e-451e-89a3-5e36287a0c5b-service-ca-bundle\") pod \"authentication-operator-69f744f599-ds4j6\" (UID: \"b20ffaa1-a31e-451e-89a3-5e36287a0c5b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ds4j6" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.465743 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cb71375d-01dd-442b-ac48-a7f26ccde85d-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-mcrbn\" (UID: \"cb71375d-01dd-442b-ac48-a7f26ccde85d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcrbn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.465919 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfwxc\" (UniqueName: \"kubernetes.io/projected/3bb818a6-b7dd-4e6a-b767-394bca081222-kube-api-access-cfwxc\") pod \"apiserver-7bbb656c7d-fvbq2\" (UID: \"3bb818a6-b7dd-4e6a-b767-394bca081222\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.465999 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/a34c5792-5895-4d08-9e7e-b3948f5be096-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-9fjv2\" (UID: \"a34c5792-5895-4d08-9e7e-b3948f5be096\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9fjv2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.466160 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1edd916-a3bf-4331-abba-d5c8753d4377-serving-cert\") pod \"controller-manager-879f6c89f-xpdpr\" (UID: \"d1edd916-a3bf-4331-abba-d5c8753d4377\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xpdpr" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.466214 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0dfe66dc-684e-49ba-932e-11e3d0eff5b0-serving-cert\") pod \"console-operator-58897d9998-j5cvd\" (UID: \"0dfe66dc-684e-49ba-932e-11e3d0eff5b0\") " pod="openshift-console-operator/console-operator-58897d9998-j5cvd" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.466245 4708 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/283462c1-8f31-40aa-b570-96fff19ff3d0-serving-cert\") pod \"openshift-config-operator-7777fb866f-tdn7n\" (UID: \"283462c1-8f31-40aa-b570-96fff19ff3d0\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tdn7n" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.466270 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/c4d3118a-28d4-403b-95c3-f2a11c14846d-audit\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.466296 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/c4d3118a-28d4-403b-95c3-f2a11c14846d-image-import-ca\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.466326 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.466359 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.466382 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3bb818a6-b7dd-4e6a-b767-394bca081222-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-fvbq2\" (UID: \"3bb818a6-b7dd-4e6a-b767-394bca081222\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.466392 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.466407 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb71375d-01dd-442b-ac48-a7f26ccde85d-config\") pod \"openshift-apiserver-operator-796bbdcf4f-mcrbn\" (UID: \"cb71375d-01dd-442b-ac48-a7f26ccde85d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcrbn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.466449 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d1edd916-a3bf-4331-abba-d5c8753d4377-client-ca\") pod \"controller-manager-879f6c89f-xpdpr\" (UID: \"d1edd916-a3bf-4331-abba-d5c8753d4377\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-xpdpr" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.466483 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.466553 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7cdmx\" (UniqueName: \"kubernetes.io/projected/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-kube-api-access-7cdmx\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.466577 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/46ee42c1-592d-47c3-85ba-ead60edf7aca-trusted-ca-bundle\") pod \"console-f9d7485db-zd8kn\" (UID: \"46ee42c1-592d-47c3-85ba-ead60edf7aca\") " pod="openshift-console/console-f9d7485db-zd8kn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.466625 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-audit-dir\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.466656 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0dfe66dc-684e-49ba-932e-11e3d0eff5b0-config\") pod \"console-operator-58897d9998-j5cvd\" (UID: \"0dfe66dc-684e-49ba-932e-11e3d0eff5b0\") " pod="openshift-console-operator/console-operator-58897d9998-j5cvd" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.466678 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3bb818a6-b7dd-4e6a-b767-394bca081222-etcd-client\") pod \"apiserver-7bbb656c7d-fvbq2\" (UID: \"3bb818a6-b7dd-4e6a-b767-394bca081222\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.466706 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/46ee42c1-592d-47c3-85ba-ead60edf7aca-console-config\") pod \"console-f9d7485db-zd8kn\" (UID: \"46ee42c1-592d-47c3-85ba-ead60edf7aca\") " pod="openshift-console/console-f9d7485db-zd8kn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.466729 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/544ec176-92f4-4374-ae80-2ed8717172d1-machine-approver-tls\") pod \"machine-approver-56656f9798-vzmxn\" (UID: \"544ec176-92f4-4374-ae80-2ed8717172d1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vzmxn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 
07:12:42.466740 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.466824 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.466856 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.466900 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/46ee42c1-592d-47c3-85ba-ead60edf7aca-service-ca\") pod \"console-f9d7485db-zd8kn\" (UID: \"46ee42c1-592d-47c3-85ba-ead60edf7aca\") " pod="openshift-console/console-f9d7485db-zd8kn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.466948 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhqph\" (UniqueName: \"kubernetes.io/projected/544ec176-92f4-4374-ae80-2ed8717172d1-kube-api-access-zhqph\") pod \"machine-approver-56656f9798-vzmxn\" (UID: \"544ec176-92f4-4374-ae80-2ed8717172d1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vzmxn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.467056 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d1edd916-a3bf-4331-abba-d5c8753d4377-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-xpdpr\" (UID: \"d1edd916-a3bf-4331-abba-d5c8753d4377\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xpdpr" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.467110 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tj2mp\" (UniqueName: \"kubernetes.io/projected/3a0d22e9-2f9a-4a91-85b8-7ad55bff4f46-kube-api-access-tj2mp\") pod \"cluster-samples-operator-665b6dd947-mwrf4\" (UID: \"3a0d22e9-2f9a-4a91-85b8-7ad55bff4f46\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mwrf4" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.467161 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/3a0d22e9-2f9a-4a91-85b8-7ad55bff4f46-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-mwrf4\" (UID: \"3a0d22e9-2f9a-4a91-85b8-7ad55bff4f46\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mwrf4" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.467188 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-audit-policies\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.467212 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.467237 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.467261 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.467420 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4d3118a-28d4-403b-95c3-f2a11c14846d-config\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.467482 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fc5qg\" (UniqueName: \"kubernetes.io/projected/cb71375d-01dd-442b-ac48-a7f26ccde85d-kube-api-access-fc5qg\") pod \"openshift-apiserver-operator-796bbdcf4f-mcrbn\" (UID: \"cb71375d-01dd-442b-ac48-a7f26ccde85d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcrbn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.467522 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0dfe66dc-684e-49ba-932e-11e3d0eff5b0-trusted-ca\") pod \"console-operator-58897d9998-j5cvd\" (UID: \"0dfe66dc-684e-49ba-932e-11e3d0eff5b0\") " pod="openshift-console-operator/console-operator-58897d9998-j5cvd" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.467661 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.467123 4708 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-console-operator"/"trusted-ca" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.467966 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/46ee42c1-592d-47c3-85ba-ead60edf7aca-console-serving-cert\") pod \"console-f9d7485db-zd8kn\" (UID: \"46ee42c1-592d-47c3-85ba-ead60edf7aca\") " pod="openshift-console/console-f9d7485db-zd8kn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.468056 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c4d3118a-28d4-403b-95c3-f2a11c14846d-etcd-client\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.468156 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/544ec176-92f4-4374-ae80-2ed8717172d1-auth-proxy-config\") pod \"machine-approver-56656f9798-vzmxn\" (UID: \"544ec176-92f4-4374-ae80-2ed8717172d1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vzmxn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.468206 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4dnt\" (UniqueName: \"kubernetes.io/projected/283462c1-8f31-40aa-b570-96fff19ff3d0-kube-api-access-q4dnt\") pod \"openshift-config-operator-7777fb866f-tdn7n\" (UID: \"283462c1-8f31-40aa-b570-96fff19ff3d0\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tdn7n" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.468254 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rrkqj\" (UniqueName: \"kubernetes.io/projected/c4d3118a-28d4-403b-95c3-f2a11c14846d-kube-api-access-rrkqj\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.468308 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qs8bz\" (UniqueName: \"kubernetes.io/projected/d1edd916-a3bf-4331-abba-d5c8753d4377-kube-api-access-qs8bz\") pod \"controller-manager-879f6c89f-xpdpr\" (UID: \"d1edd916-a3bf-4331-abba-d5c8753d4377\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xpdpr" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.468362 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.468415 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/a34c5792-5895-4d08-9e7e-b3948f5be096-images\") pod \"machine-api-operator-5694c8668f-9fjv2\" (UID: \"a34c5792-5895-4d08-9e7e-b3948f5be096\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9fjv2" Feb 03 07:12:42 
crc kubenswrapper[4708]: I0203 07:12:42.468492 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1edd916-a3bf-4331-abba-d5c8753d4377-config\") pod \"controller-manager-879f6c89f-xpdpr\" (UID: \"d1edd916-a3bf-4331-abba-d5c8753d4377\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xpdpr" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.468532 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/abf360a5-e982-4b3e-a814-511d57e9073f-metrics-tls\") pod \"dns-operator-744455d44c-mts5h\" (UID: \"abf360a5-e982-4b3e-a814-511d57e9073f\") " pod="openshift-dns-operator/dns-operator-744455d44c-mts5h" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.468559 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3bb818a6-b7dd-4e6a-b767-394bca081222-encryption-config\") pod \"apiserver-7bbb656c7d-fvbq2\" (UID: \"3bb818a6-b7dd-4e6a-b767-394bca081222\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.468584 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8246l\" (UniqueName: \"kubernetes.io/projected/46ee42c1-592d-47c3-85ba-ead60edf7aca-kube-api-access-8246l\") pod \"console-f9d7485db-zd8kn\" (UID: \"46ee42c1-592d-47c3-85ba-ead60edf7aca\") " pod="openshift-console/console-f9d7485db-zd8kn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.468618 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/46ee42c1-592d-47c3-85ba-ead60edf7aca-oauth-serving-cert\") pod \"console-f9d7485db-zd8kn\" (UID: \"46ee42c1-592d-47c3-85ba-ead60edf7aca\") " pod="openshift-console/console-f9d7485db-zd8kn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.468651 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/544ec176-92f4-4374-ae80-2ed8717172d1-config\") pod \"machine-approver-56656f9798-vzmxn\" (UID: \"544ec176-92f4-4374-ae80-2ed8717172d1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vzmxn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.468692 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-msn54\" (UniqueName: \"kubernetes.io/projected/0dfe66dc-684e-49ba-932e-11e3d0eff5b0-kube-api-access-msn54\") pod \"console-operator-58897d9998-j5cvd\" (UID: \"0dfe66dc-684e-49ba-932e-11e3d0eff5b0\") " pod="openshift-console-operator/console-operator-58897d9998-j5cvd" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.468718 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3bb818a6-b7dd-4e6a-b767-394bca081222-serving-cert\") pod \"apiserver-7bbb656c7d-fvbq2\" (UID: \"3bb818a6-b7dd-4e6a-b767-394bca081222\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.468740 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/283462c1-8f31-40aa-b570-96fff19ff3d0-available-featuregates\") pod \"openshift-config-operator-7777fb866f-tdn7n\" (UID: \"283462c1-8f31-40aa-b570-96fff19ff3d0\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tdn7n" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.468766 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/46ee42c1-592d-47c3-85ba-ead60edf7aca-console-oauth-config\") pod \"console-f9d7485db-zd8kn\" (UID: \"46ee42c1-592d-47c3-85ba-ead60edf7aca\") " pod="openshift-console/console-f9d7485db-zd8kn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.468816 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfxxg\" (UniqueName: \"kubernetes.io/projected/7ee20271-e2ce-4476-a011-5e00e19126bf-kube-api-access-zfxxg\") pod \"downloads-7954f5f757-c2mlc\" (UID: \"7ee20271-e2ce-4476-a011-5e00e19126bf\") " pod="openshift-console/downloads-7954f5f757-c2mlc" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.468892 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c4d3118a-28d4-403b-95c3-f2a11c14846d-audit-dir\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.469148 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.473144 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-nqhrk"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.473846 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-b9npd"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.474389 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8xvkk"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.475209 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.475518 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.475568 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.475668 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.476012 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b9npd" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.476304 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.476488 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.478304 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.478833 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.479198 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.479453 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8xvkk" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.481353 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-tdn7n"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.502641 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xcbl9"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.503535 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.516497 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-zrxh2"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.516652 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xcbl9" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.517631 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8xpd7"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.518001 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.518180 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-zrxh2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.519333 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.519615 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.520358 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.524027 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-gldbt"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.524348 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8xpd7" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.525008 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-xvf6j"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.525359 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-gldbt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.525461 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-5c2p2"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.525634 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xvf6j" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.526186 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5c2p2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.528729 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-mq55f"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.529523 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-mq55f" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.532953 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-v447t"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.533762 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-v447t" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.536610 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j5xtp"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.537974 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j5xtp" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.538666 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mkbsf"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.539241 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mkbsf" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.539783 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-5tffj"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.540414 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-5tffj" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.540880 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501700-w7xxc"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.541227 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.541707 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-w7xxc" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.548259 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-p5qw9"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.549566 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-p5qw9" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.549672 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dwg5b"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.550448 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dwg5b" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.550951 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-2wrb6"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.551714 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-2wrb6" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.552394 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-jfpkc"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.553297 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-jfpkc" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.553489 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-t2lvz"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.554193 4708 util.go:30] "No sandbox for pod can be found. 
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.556104 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2"]
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.556667 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-l76sw"]
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.557158 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-l76sw"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.558443 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qwdnv"]
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.558845 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qwdnv"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.558947 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-ds4j6"]
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.560623 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vtsww"]
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.564535 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-5c2p2"]
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.567923 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-v447t"]
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.569337 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mwrf4"]
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.569619 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1edd916-a3bf-4331-abba-d5c8753d4377-serving-cert\") pod \"controller-manager-879f6c89f-xpdpr\" (UID: \"d1edd916-a3bf-4331-abba-d5c8753d4377\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xpdpr"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.569659 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0dfe66dc-684e-49ba-932e-11e3d0eff5b0-serving-cert\") pod \"console-operator-58897d9998-j5cvd\" (UID: \"0dfe66dc-684e-49ba-932e-11e3d0eff5b0\") " pod="openshift-console-operator/console-operator-58897d9998-j5cvd"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.569689 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d24b4086-5d15-4100-b580-9d9b69aa7602-serving-cert\") pod \"etcd-operator-b45778765-mq55f\" (UID: \"d24b4086-5d15-4100-b580-9d9b69aa7602\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq55f"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.569716 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d24b4086-5d15-4100-b580-9d9b69aa7602-config\") pod \"etcd-operator-b45778765-mq55f\" (UID: \"d24b4086-5d15-4100-b580-9d9b69aa7602\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq55f"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.569743 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.569770 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3bb818a6-b7dd-4e6a-b767-394bca081222-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-fvbq2\" (UID: \"3bb818a6-b7dd-4e6a-b767-394bca081222\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.569814 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb71375d-01dd-442b-ac48-a7f26ccde85d-config\") pod \"openshift-apiserver-operator-796bbdcf4f-mcrbn\" (UID: \"cb71375d-01dd-442b-ac48-a7f26ccde85d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcrbn"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.569844 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f52e0f35-b2d7-40ed-8e44-3c4408657eb0-config\") pod \"kube-controller-manager-operator-78b949d7b-8xpd7\" (UID: \"f52e0f35-b2d7-40ed-8e44-3c4408657eb0\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8xpd7"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.569871 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2ad9206f-443a-4f4b-820f-f017581bb341-proxy-tls\") pod \"machine-config-controller-84d6567774-gldbt\" (UID: \"2ad9206f-443a-4f4b-820f-f017581bb341\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-gldbt"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.569893 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/d24b4086-5d15-4100-b580-9d9b69aa7602-etcd-ca\") pod \"etcd-operator-b45778765-mq55f\" (UID: \"d24b4086-5d15-4100-b580-9d9b69aa7602\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq55f"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.569918 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lw45p\" (UniqueName: \"kubernetes.io/projected/a1054514-9df0-4244-938c-9c6430b8183b-kube-api-access-lw45p\") pod \"openshift-controller-manager-operator-756b6f6bc6-vtsww\" (UID: \"a1054514-9df0-4244-938c-9c6430b8183b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vtsww"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.569942 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0a2f3fb0-fc4d-49fe-a84f-9bfbc39280ea-srv-cert\") pod \"olm-operator-6b444d44fb-mkbsf\" (UID: \"0a2f3fb0-fc4d-49fe-a84f-9bfbc39280ea\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mkbsf"
\"kubernetes.io/secret/0a2f3fb0-fc4d-49fe-a84f-9bfbc39280ea-srv-cert\") pod \"olm-operator-6b444d44fb-mkbsf\" (UID: \"0a2f3fb0-fc4d-49fe-a84f-9bfbc39280ea\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mkbsf" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.569966 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7cdmx\" (UniqueName: \"kubernetes.io/projected/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-kube-api-access-7cdmx\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.569994 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/46ee42c1-592d-47c3-85ba-ead60edf7aca-trusted-ca-bundle\") pod \"console-f9d7485db-zd8kn\" (UID: \"46ee42c1-592d-47c3-85ba-ead60edf7aca\") " pod="openshift-console/console-f9d7485db-zd8kn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570018 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d24b4086-5d15-4100-b580-9d9b69aa7602-etcd-client\") pod \"etcd-operator-b45778765-mq55f\" (UID: \"d24b4086-5d15-4100-b580-9d9b69aa7602\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq55f" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570041 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-audit-dir\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570063 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0dfe66dc-684e-49ba-932e-11e3d0eff5b0-config\") pod \"console-operator-58897d9998-j5cvd\" (UID: \"0dfe66dc-684e-49ba-932e-11e3d0eff5b0\") " pod="openshift-console-operator/console-operator-58897d9998-j5cvd" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570087 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3bb818a6-b7dd-4e6a-b767-394bca081222-etcd-client\") pod \"apiserver-7bbb656c7d-fvbq2\" (UID: \"3bb818a6-b7dd-4e6a-b767-394bca081222\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570108 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/544ec176-92f4-4374-ae80-2ed8717172d1-machine-approver-tls\") pod \"machine-approver-56656f9798-vzmxn\" (UID: \"544ec176-92f4-4374-ae80-2ed8717172d1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vzmxn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570140 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/f8f529d7-932a-4047-b603-f84e03fe6898-stats-auth\") pod \"router-default-5444994796-zrxh2\" (UID: \"f8f529d7-932a-4047-b603-f84e03fe6898\") " pod="openshift-ingress/router-default-5444994796-zrxh2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 
07:12:42.570163 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570185 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/46ee42c1-592d-47c3-85ba-ead60edf7aca-service-ca\") pod \"console-f9d7485db-zd8kn\" (UID: \"46ee42c1-592d-47c3-85ba-ead60edf7aca\") " pod="openshift-console/console-f9d7485db-zd8kn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570211 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/69843c19-ead2-4248-a016-c29c58ddddc3-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-ghh7f\" (UID: \"69843c19-ead2-4248-a016-c29c58ddddc3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ghh7f" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570236 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/3a0d22e9-2f9a-4a91-85b8-7ad55bff4f46-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-mwrf4\" (UID: \"3a0d22e9-2f9a-4a91-85b8-7ad55bff4f46\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mwrf4" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570260 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570283 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4d3118a-28d4-403b-95c3-f2a11c14846d-config\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570306 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fc5qg\" (UniqueName: \"kubernetes.io/projected/cb71375d-01dd-442b-ac48-a7f26ccde85d-kube-api-access-fc5qg\") pod \"openshift-apiserver-operator-796bbdcf4f-mcrbn\" (UID: \"cb71375d-01dd-442b-ac48-a7f26ccde85d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcrbn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570327 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/69843c19-ead2-4248-a016-c29c58ddddc3-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-ghh7f\" (UID: \"69843c19-ead2-4248-a016-c29c58ddddc3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ghh7f" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570344 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570360 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/12242d4a-0312-44a6-8283-9794875efda5-auth-proxy-config\") pod \"machine-config-operator-74547568cd-xvf6j\" (UID: \"12242d4a-0312-44a6-8283-9794875efda5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xvf6j"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570377 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zn6g8\" (UniqueName: \"kubernetes.io/projected/2ad9206f-443a-4f4b-820f-f017581bb341-kube-api-access-zn6g8\") pod \"machine-config-controller-84d6567774-gldbt\" (UID: \"2ad9206f-443a-4f4b-820f-f017581bb341\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-gldbt"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570395 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570411 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8xhk\" (UniqueName: \"kubernetes.io/projected/69843c19-ead2-4248-a016-c29c58ddddc3-kube-api-access-v8xhk\") pod \"cluster-image-registry-operator-dc59b4c8b-ghh7f\" (UID: \"69843c19-ead2-4248-a016-c29c58ddddc3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ghh7f"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570429 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/46ee42c1-592d-47c3-85ba-ead60edf7aca-console-serving-cert\") pod \"console-f9d7485db-zd8kn\" (UID: \"46ee42c1-592d-47c3-85ba-ead60edf7aca\") " pod="openshift-console/console-f9d7485db-zd8kn"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570457 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4dnt\" (UniqueName: \"kubernetes.io/projected/283462c1-8f31-40aa-b570-96fff19ff3d0-kube-api-access-q4dnt\") pod \"openshift-config-operator-7777fb866f-tdn7n\" (UID: \"283462c1-8f31-40aa-b570-96fff19ff3d0\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tdn7n"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570480 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rrkqj\" (UniqueName: \"kubernetes.io/projected/c4d3118a-28d4-403b-95c3-f2a11c14846d-kube-api-access-rrkqj\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570497 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qs8bz\" (UniqueName: \"kubernetes.io/projected/d1edd916-a3bf-4331-abba-d5c8753d4377-kube-api-access-qs8bz\") pod \"controller-manager-879f6c89f-xpdpr\" (UID: \"d1edd916-a3bf-4331-abba-d5c8753d4377\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xpdpr"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570522 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1edd916-a3bf-4331-abba-d5c8753d4377-config\") pod \"controller-manager-879f6c89f-xpdpr\" (UID: \"d1edd916-a3bf-4331-abba-d5c8753d4377\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xpdpr"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570540 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8246l\" (UniqueName: \"kubernetes.io/projected/46ee42c1-592d-47c3-85ba-ead60edf7aca-kube-api-access-8246l\") pod \"console-f9d7485db-zd8kn\" (UID: \"46ee42c1-592d-47c3-85ba-ead60edf7aca\") " pod="openshift-console/console-f9d7485db-zd8kn"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570557 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/46ee42c1-592d-47c3-85ba-ead60edf7aca-oauth-serving-cert\") pod \"console-f9d7485db-zd8kn\" (UID: \"46ee42c1-592d-47c3-85ba-ead60edf7aca\") " pod="openshift-console/console-f9d7485db-zd8kn"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570578 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/544ec176-92f4-4374-ae80-2ed8717172d1-config\") pod \"machine-approver-56656f9798-vzmxn\" (UID: \"544ec176-92f4-4374-ae80-2ed8717172d1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vzmxn"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570601 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ead3a61c-4b09-4f98-866a-1e66ed92d084-secret-volume\") pod \"collect-profiles-29501700-w7xxc\" (UID: \"ead3a61c-4b09-4f98-866a-1e66ed92d084\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-w7xxc"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570620 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3bb818a6-b7dd-4e6a-b767-394bca081222-serving-cert\") pod \"apiserver-7bbb656c7d-fvbq2\" (UID: \"3bb818a6-b7dd-4e6a-b767-394bca081222\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570649 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c4d3118a-28d4-403b-95c3-f2a11c14846d-trusted-ca-bundle\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570668 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/d24b4086-5d15-4100-b580-9d9b69aa7602-etcd-service-ca\") pod \"etcd-operator-b45778765-mq55f\" (UID: \"d24b4086-5d15-4100-b580-9d9b69aa7602\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq55f"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570686 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a1054514-9df0-4244-938c-9c6430b8183b-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-vtsww\" (UID: \"a1054514-9df0-4244-938c-9c6430b8183b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vtsww"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570703 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0a2f3fb0-fc4d-49fe-a84f-9bfbc39280ea-profile-collector-cert\") pod \"olm-operator-6b444d44fb-mkbsf\" (UID: \"0a2f3fb0-fc4d-49fe-a84f-9bfbc39280ea\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mkbsf"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570726 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkldd\" (UniqueName: \"kubernetes.io/projected/b20ffaa1-a31e-451e-89a3-5e36287a0c5b-kube-api-access-qkldd\") pod \"authentication-operator-69f744f599-ds4j6\" (UID: \"b20ffaa1-a31e-451e-89a3-5e36287a0c5b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ds4j6"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570751 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/69843c19-ead2-4248-a016-c29c58ddddc3-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-ghh7f\" (UID: \"69843c19-ead2-4248-a016-c29c58ddddc3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ghh7f"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570753 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb71375d-01dd-442b-ac48-a7f26ccde85d-config\") pod \"openshift-apiserver-operator-796bbdcf4f-mcrbn\" (UID: \"cb71375d-01dd-442b-ac48-a7f26ccde85d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcrbn"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570771 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pts46\" (UniqueName: \"kubernetes.io/projected/f8f529d7-932a-4047-b603-f84e03fe6898-kube-api-access-pts46\") pod \"router-default-5444994796-zrxh2\" (UID: \"f8f529d7-932a-4047-b603-f84e03fe6898\") " pod="openshift-ingress/router-default-5444994796-zrxh2"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570812 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b20ffaa1-a31e-451e-89a3-5e36287a0c5b-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-ds4j6\" (UID: \"b20ffaa1-a31e-451e-89a3-5e36287a0c5b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ds4j6"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570835 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xkcq\" (UniqueName: \"kubernetes.io/projected/12242d4a-0312-44a6-8283-9794875efda5-kube-api-access-4xkcq\") pod \"machine-config-operator-74547568cd-xvf6j\" (UID: \"12242d4a-0312-44a6-8283-9794875efda5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xvf6j"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570855 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3bb818a6-b7dd-4e6a-b767-394bca081222-audit-dir\") pod \"apiserver-7bbb656c7d-fvbq2\" (UID: \"3bb818a6-b7dd-4e6a-b767-394bca081222\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570875 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3bb818a6-b7dd-4e6a-b767-394bca081222-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-fvbq2\" (UID: \"3bb818a6-b7dd-4e6a-b767-394bca081222\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570896 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sd46g\" (UniqueName: \"kubernetes.io/projected/a34c5792-5895-4d08-9e7e-b3948f5be096-kube-api-access-sd46g\") pod \"machine-api-operator-5694c8668f-9fjv2\" (UID: \"a34c5792-5895-4d08-9e7e-b3948f5be096\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9fjv2"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570940 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f52e0f35-b2d7-40ed-8e44-3c4408657eb0-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8xpd7\" (UID: \"f52e0f35-b2d7-40ed-8e44-3c4408657eb0\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8xpd7"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570965 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a1054514-9df0-4244-938c-9c6430b8183b-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-vtsww\" (UID: \"a1054514-9df0-4244-938c-9c6430b8183b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vtsww"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.570992 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b20ffaa1-a31e-451e-89a3-5e36287a0c5b-serving-cert\") pod \"authentication-operator-69f744f599-ds4j6\" (UID: \"b20ffaa1-a31e-451e-89a3-5e36287a0c5b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ds4j6"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.571016 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b20ffaa1-a31e-451e-89a3-5e36287a0c5b-service-ca-bundle\") pod \"authentication-operator-69f744f599-ds4j6\" (UID: \"b20ffaa1-a31e-451e-89a3-5e36287a0c5b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ds4j6"
Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.571044 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cb71375d-01dd-442b-ac48-a7f26ccde85d-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-mcrbn\" (UID: \"cb71375d-01dd-442b-ac48-a7f26ccde85d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcrbn"
pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcrbn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.571065 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfwxc\" (UniqueName: \"kubernetes.io/projected/3bb818a6-b7dd-4e6a-b767-394bca081222-kube-api-access-cfwxc\") pod \"apiserver-7bbb656c7d-fvbq2\" (UID: \"3bb818a6-b7dd-4e6a-b767-394bca081222\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.571091 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/a34c5792-5895-4d08-9e7e-b3948f5be096-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-9fjv2\" (UID: \"a34c5792-5895-4d08-9e7e-b3948f5be096\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9fjv2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.571135 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfkf6\" (UniqueName: \"kubernetes.io/projected/7ec92dcf-aef5-49fd-9d97-ccf3c79decd8-kube-api-access-tfkf6\") pod \"ingress-operator-5b745b69d9-b9npd\" (UID: \"7ec92dcf-aef5-49fd-9d97-ccf3c79decd8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b9npd" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.571165 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/283462c1-8f31-40aa-b570-96fff19ff3d0-serving-cert\") pod \"openshift-config-operator-7777fb866f-tdn7n\" (UID: \"283462c1-8f31-40aa-b570-96fff19ff3d0\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tdn7n" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.571189 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/c4d3118a-28d4-403b-95c3-f2a11c14846d-audit\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.571216 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/c4d3118a-28d4-403b-95c3-f2a11c14846d-image-import-ca\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.571242 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.571278 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d1edd916-a3bf-4331-abba-d5c8753d4377-client-ca\") pod \"controller-manager-879f6c89f-xpdpr\" (UID: \"d1edd916-a3bf-4331-abba-d5c8753d4377\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xpdpr" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.571310 4708 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7ec92dcf-aef5-49fd-9d97-ccf3c79decd8-trusted-ca\") pod \"ingress-operator-5b745b69d9-b9npd\" (UID: \"7ec92dcf-aef5-49fd-9d97-ccf3c79decd8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b9npd" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.571360 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.571414 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/46ee42c1-592d-47c3-85ba-ead60edf7aca-console-config\") pod \"console-f9d7485db-zd8kn\" (UID: \"46ee42c1-592d-47c3-85ba-ead60edf7aca\") " pod="openshift-console/console-f9d7485db-zd8kn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.571447 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/7ec92dcf-aef5-49fd-9d97-ccf3c79decd8-metrics-tls\") pod \"ingress-operator-5b745b69d9-b9npd\" (UID: \"7ec92dcf-aef5-49fd-9d97-ccf3c79decd8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b9npd" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.571480 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.571512 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ead3a61c-4b09-4f98-866a-1e66ed92d084-config-volume\") pod \"collect-profiles-29501700-w7xxc\" (UID: \"ead3a61c-4b09-4f98-866a-1e66ed92d084\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-w7xxc" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.571547 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhqph\" (UniqueName: \"kubernetes.io/projected/544ec176-92f4-4374-ae80-2ed8717172d1-kube-api-access-zhqph\") pod \"machine-approver-56656f9798-vzmxn\" (UID: \"544ec176-92f4-4374-ae80-2ed8717172d1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vzmxn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.571580 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2ad9206f-443a-4f4b-820f-f017581bb341-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-gldbt\" (UID: \"2ad9206f-443a-4f4b-820f-f017581bb341\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-gldbt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.571615 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" 
(UniqueName: \"kubernetes.io/configmap/d1edd916-a3bf-4331-abba-d5c8753d4377-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-xpdpr\" (UID: \"d1edd916-a3bf-4331-abba-d5c8753d4377\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xpdpr" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.571647 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tj2mp\" (UniqueName: \"kubernetes.io/projected/3a0d22e9-2f9a-4a91-85b8-7ad55bff4f46-kube-api-access-tj2mp\") pod \"cluster-samples-operator-665b6dd947-mwrf4\" (UID: \"3a0d22e9-2f9a-4a91-85b8-7ad55bff4f46\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mwrf4" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.571682 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-audit-policies\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.571715 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.571753 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.571822 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgpbd\" (UniqueName: \"kubernetes.io/projected/f6237c57-eb31-40c5-8b6c-75a77a58ccdb-kube-api-access-sgpbd\") pod \"migrator-59844c95c7-5c2p2\" (UID: \"f6237c57-eb31-40c5-8b6c-75a77a58ccdb\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5c2p2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.571858 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0dfe66dc-684e-49ba-932e-11e3d0eff5b0-trusted-ca\") pod \"console-operator-58897d9998-j5cvd\" (UID: \"0dfe66dc-684e-49ba-932e-11e3d0eff5b0\") " pod="openshift-console-operator/console-operator-58897d9998-j5cvd" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.571893 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c4d3118a-28d4-403b-95c3-f2a11c14846d-etcd-client\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.571918 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/544ec176-92f4-4374-ae80-2ed8717172d1-auth-proxy-config\") pod \"machine-approver-56656f9798-vzmxn\" 
(UID: \"544ec176-92f4-4374-ae80-2ed8717172d1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vzmxn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.571980 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.572015 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/a34c5792-5895-4d08-9e7e-b3948f5be096-images\") pod \"machine-api-operator-5694c8668f-9fjv2\" (UID: \"a34c5792-5895-4d08-9e7e-b3948f5be096\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9fjv2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.572080 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7ec92dcf-aef5-49fd-9d97-ccf3c79decd8-bound-sa-token\") pod \"ingress-operator-5b745b69d9-b9npd\" (UID: \"7ec92dcf-aef5-49fd-9d97-ccf3c79decd8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b9npd" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.572117 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/abf360a5-e982-4b3e-a814-511d57e9073f-metrics-tls\") pod \"dns-operator-744455d44c-mts5h\" (UID: \"abf360a5-e982-4b3e-a814-511d57e9073f\") " pod="openshift-dns-operator/dns-operator-744455d44c-mts5h" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.572142 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3bb818a6-b7dd-4e6a-b767-394bca081222-encryption-config\") pod \"apiserver-7bbb656c7d-fvbq2\" (UID: \"3bb818a6-b7dd-4e6a-b767-394bca081222\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.572166 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q47rx\" (UniqueName: \"kubernetes.io/projected/d24b4086-5d15-4100-b580-9d9b69aa7602-kube-api-access-q47rx\") pod \"etcd-operator-b45778765-mq55f\" (UID: \"d24b4086-5d15-4100-b580-9d9b69aa7602\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq55f" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.572193 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7587g\" (UniqueName: \"kubernetes.io/projected/df8a6753-1332-4db6-b738-020474d60851-kube-api-access-7587g\") pod \"ingress-canary-5tffj\" (UID: \"df8a6753-1332-4db6-b738-020474d60851\") " pod="openshift-ingress-canary/ingress-canary-5tffj" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.572214 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-msn54\" (UniqueName: \"kubernetes.io/projected/0dfe66dc-684e-49ba-932e-11e3d0eff5b0-kube-api-access-msn54\") pod \"console-operator-58897d9998-j5cvd\" (UID: \"0dfe66dc-684e-49ba-932e-11e3d0eff5b0\") " pod="openshift-console-operator/console-operator-58897d9998-j5cvd" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.572239 4708 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/283462c1-8f31-40aa-b570-96fff19ff3d0-available-featuregates\") pod \"openshift-config-operator-7777fb866f-tdn7n\" (UID: \"283462c1-8f31-40aa-b570-96fff19ff3d0\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tdn7n" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.572265 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6d0028e2-24a1-479d-805d-9ac66cfdd68a-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xcbl9\" (UID: \"6d0028e2-24a1-479d-805d-9ac66cfdd68a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xcbl9" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.572290 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/46ee42c1-592d-47c3-85ba-ead60edf7aca-console-oauth-config\") pod \"console-f9d7485db-zd8kn\" (UID: \"46ee42c1-592d-47c3-85ba-ead60edf7aca\") " pod="openshift-console/console-f9d7485db-zd8kn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.572313 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6d0028e2-24a1-479d-805d-9ac66cfdd68a-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xcbl9\" (UID: \"6d0028e2-24a1-479d-805d-9ac66cfdd68a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xcbl9" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.572346 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfxxg\" (UniqueName: \"kubernetes.io/projected/7ee20271-e2ce-4476-a011-5e00e19126bf-kube-api-access-zfxxg\") pod \"downloads-7954f5f757-c2mlc\" (UID: \"7ee20271-e2ce-4476-a011-5e00e19126bf\") " pod="openshift-console/downloads-7954f5f757-c2mlc" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.572367 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c4d3118a-28d4-403b-95c3-f2a11c14846d-audit-dir\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.572390 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/f8f529d7-932a-4047-b603-f84e03fe6898-default-certificate\") pod \"router-default-5444994796-zrxh2\" (UID: \"f8f529d7-932a-4047-b603-f84e03fe6898\") " pod="openshift-ingress/router-default-5444994796-zrxh2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.572421 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clxjq\" (UniqueName: \"kubernetes.io/projected/abf360a5-e982-4b3e-a814-511d57e9073f-kube-api-access-clxjq\") pod \"dns-operator-744455d44c-mts5h\" (UID: \"abf360a5-e982-4b3e-a814-511d57e9073f\") " pod="openshift-dns-operator/dns-operator-744455d44c-mts5h" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.572446 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/b20ffaa1-a31e-451e-89a3-5e36287a0c5b-config\") pod \"authentication-operator-69f744f599-ds4j6\" (UID: \"b20ffaa1-a31e-451e-89a3-5e36287a0c5b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ds4j6" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.572475 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f8f529d7-932a-4047-b603-f84e03fe6898-metrics-certs\") pod \"router-default-5444994796-zrxh2\" (UID: \"f8f529d7-932a-4047-b603-f84e03fe6898\") " pod="openshift-ingress/router-default-5444994796-zrxh2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.572495 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/c4d3118a-28d4-403b-95c3-f2a11c14846d-etcd-serving-ca\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.572527 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lr8nt\" (UniqueName: \"kubernetes.io/projected/0a2f3fb0-fc4d-49fe-a84f-9bfbc39280ea-kube-api-access-lr8nt\") pod \"olm-operator-6b444d44fb-mkbsf\" (UID: \"0a2f3fb0-fc4d-49fe-a84f-9bfbc39280ea\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mkbsf" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.572554 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6fqj\" (UniqueName: \"kubernetes.io/projected/ead3a61c-4b09-4f98-866a-1e66ed92d084-kube-api-access-p6fqj\") pod \"collect-profiles-29501700-w7xxc\" (UID: \"ead3a61c-4b09-4f98-866a-1e66ed92d084\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-w7xxc" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.572582 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a34c5792-5895-4d08-9e7e-b3948f5be096-config\") pod \"machine-api-operator-5694c8668f-9fjv2\" (UID: \"a34c5792-5895-4d08-9e7e-b3948f5be096\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9fjv2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.572611 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d0028e2-24a1-479d-805d-9ac66cfdd68a-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xcbl9\" (UID: \"6d0028e2-24a1-479d-805d-9ac66cfdd68a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xcbl9" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.572631 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/12242d4a-0312-44a6-8283-9794875efda5-proxy-tls\") pod \"machine-config-operator-74547568cd-xvf6j\" (UID: \"12242d4a-0312-44a6-8283-9794875efda5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xvf6j" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.572868 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c4d3118a-28d4-403b-95c3-f2a11c14846d-serving-cert\") pod 
\"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.572901 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.572923 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3bb818a6-b7dd-4e6a-b767-394bca081222-audit-policies\") pod \"apiserver-7bbb656c7d-fvbq2\" (UID: \"3bb818a6-b7dd-4e6a-b767-394bca081222\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.572954 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/df8a6753-1332-4db6-b738-020474d60851-cert\") pod \"ingress-canary-5tffj\" (UID: \"df8a6753-1332-4db6-b738-020474d60851\") " pod="openshift-ingress-canary/ingress-canary-5tffj" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.572982 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/c4d3118a-28d4-403b-95c3-f2a11c14846d-node-pullsecrets\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.573008 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/c4d3118a-28d4-403b-95c3-f2a11c14846d-encryption-config\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.573042 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f52e0f35-b2d7-40ed-8e44-3c4408657eb0-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8xpd7\" (UID: \"f52e0f35-b2d7-40ed-8e44-3c4408657eb0\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8xpd7" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.573063 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/12242d4a-0312-44a6-8283-9794875efda5-images\") pod \"machine-config-operator-74547568cd-xvf6j\" (UID: \"12242d4a-0312-44a6-8283-9794875efda5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xvf6j" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.573525 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/46ee42c1-592d-47c3-85ba-ead60edf7aca-trusted-ca-bundle\") pod \"console-f9d7485db-zd8kn\" (UID: \"46ee42c1-592d-47c3-85ba-ead60edf7aca\") " pod="openshift-console/console-f9d7485db-zd8kn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.573818 4708 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-audit-dir\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.574507 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-748cf"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.574731 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0dfe66dc-684e-49ba-932e-11e3d0eff5b0-config\") pod \"console-operator-58897d9998-j5cvd\" (UID: \"0dfe66dc-684e-49ba-932e-11e3d0eff5b0\") " pod="openshift-console-operator/console-operator-58897d9998-j5cvd" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.574970 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/46ee42c1-592d-47c3-85ba-ead60edf7aca-console-config\") pod \"console-f9d7485db-zd8kn\" (UID: \"46ee42c1-592d-47c3-85ba-ead60edf7aca\") " pod="openshift-console/console-f9d7485db-zd8kn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.574998 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/544ec176-92f4-4374-ae80-2ed8717172d1-config\") pod \"machine-approver-56656f9798-vzmxn\" (UID: \"544ec176-92f4-4374-ae80-2ed8717172d1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vzmxn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.576045 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1edd916-a3bf-4331-abba-d5c8753d4377-serving-cert\") pod \"controller-manager-879f6c89f-xpdpr\" (UID: \"d1edd916-a3bf-4331-abba-d5c8753d4377\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xpdpr" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.576522 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0dfe66dc-684e-49ba-932e-11e3d0eff5b0-serving-cert\") pod \"console-operator-58897d9998-j5cvd\" (UID: \"0dfe66dc-684e-49ba-932e-11e3d0eff5b0\") " pod="openshift-console-operator/console-operator-58897d9998-j5cvd" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.572032 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3bb818a6-b7dd-4e6a-b767-394bca081222-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-fvbq2\" (UID: \"3bb818a6-b7dd-4e6a-b767-394bca081222\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.577064 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.577359 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.578055 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.578267 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.578323 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3bb818a6-b7dd-4e6a-b767-394bca081222-serving-cert\") pod \"apiserver-7bbb656c7d-fvbq2\" (UID: \"3bb818a6-b7dd-4e6a-b767-394bca081222\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.579158 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/c4d3118a-28d4-403b-95c3-f2a11c14846d-image-import-ca\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.579315 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b20ffaa1-a31e-451e-89a3-5e36287a0c5b-service-ca-bundle\") pod \"authentication-operator-69f744f599-ds4j6\" (UID: \"b20ffaa1-a31e-451e-89a3-5e36287a0c5b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ds4j6" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.579361 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.579320 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3bb818a6-b7dd-4e6a-b767-394bca081222-audit-dir\") pod \"apiserver-7bbb656c7d-fvbq2\" (UID: \"3bb818a6-b7dd-4e6a-b767-394bca081222\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.579640 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/a34c5792-5895-4d08-9e7e-b3948f5be096-images\") pod \"machine-api-operator-5694c8668f-9fjv2\" (UID: \"a34c5792-5895-4d08-9e7e-b3948f5be096\") " 
pod="openshift-machine-api/machine-api-operator-5694c8668f-9fjv2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.580026 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b20ffaa1-a31e-451e-89a3-5e36287a0c5b-config\") pod \"authentication-operator-69f744f599-ds4j6\" (UID: \"b20ffaa1-a31e-451e-89a3-5e36287a0c5b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ds4j6" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.580074 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/46ee42c1-592d-47c3-85ba-ead60edf7aca-oauth-serving-cert\") pod \"console-f9d7485db-zd8kn\" (UID: \"46ee42c1-592d-47c3-85ba-ead60edf7aca\") " pod="openshift-console/console-f9d7485db-zd8kn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.580195 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.580571 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/c4d3118a-28d4-403b-95c3-f2a11c14846d-etcd-serving-ca\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.580935 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d1edd916-a3bf-4331-abba-d5c8753d4377-client-ca\") pod \"controller-manager-879f6c89f-xpdpr\" (UID: \"d1edd916-a3bf-4331-abba-d5c8753d4377\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xpdpr" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.581363 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/46ee42c1-592d-47c3-85ba-ead60edf7aca-service-ca\") pod \"console-f9d7485db-zd8kn\" (UID: \"46ee42c1-592d-47c3-85ba-ead60edf7aca\") " pod="openshift-console/console-f9d7485db-zd8kn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.581435 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b20ffaa1-a31e-451e-89a3-5e36287a0c5b-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-ds4j6\" (UID: \"b20ffaa1-a31e-451e-89a3-5e36287a0c5b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ds4j6" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.581731 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3bb818a6-b7dd-4e6a-b767-394bca081222-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-fvbq2\" (UID: \"3bb818a6-b7dd-4e6a-b767-394bca081222\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.581988 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.582062 4708 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/c4d3118a-28d4-403b-95c3-f2a11c14846d-node-pullsecrets\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.582729 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4d3118a-28d4-403b-95c3-f2a11c14846d-config\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.583117 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/283462c1-8f31-40aa-b570-96fff19ff3d0-available-featuregates\") pod \"openshift-config-operator-7777fb866f-tdn7n\" (UID: \"283462c1-8f31-40aa-b570-96fff19ff3d0\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tdn7n" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.583415 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/46ee42c1-592d-47c3-85ba-ead60edf7aca-console-serving-cert\") pod \"console-f9d7485db-zd8kn\" (UID: \"46ee42c1-592d-47c3-85ba-ead60edf7aca\") " pod="openshift-console/console-f9d7485db-zd8kn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.583514 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3bb818a6-b7dd-4e6a-b767-394bca081222-etcd-client\") pod \"apiserver-7bbb656c7d-fvbq2\" (UID: \"3bb818a6-b7dd-4e6a-b767-394bca081222\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.583677 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.584394 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/544ec176-92f4-4374-ae80-2ed8717172d1-auth-proxy-config\") pod \"machine-approver-56656f9798-vzmxn\" (UID: \"544ec176-92f4-4374-ae80-2ed8717172d1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vzmxn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.584442 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d1edd916-a3bf-4331-abba-d5c8753d4377-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-xpdpr\" (UID: \"d1edd916-a3bf-4331-abba-d5c8753d4377\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xpdpr" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.585009 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b20ffaa1-a31e-451e-89a3-5e36287a0c5b-serving-cert\") pod \"authentication-operator-69f744f599-ds4j6\" (UID: \"b20ffaa1-a31e-451e-89a3-5e36287a0c5b\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-ds4j6" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.585440 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/283462c1-8f31-40aa-b570-96fff19ff3d0-serving-cert\") pod \"openshift-config-operator-7777fb866f-tdn7n\" (UID: \"283462c1-8f31-40aa-b570-96fff19ff3d0\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tdn7n" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.586179 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.587508 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c4d3118a-28d4-403b-95c3-f2a11c14846d-audit-dir\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.587592 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c4d3118a-28d4-403b-95c3-f2a11c14846d-etcd-client\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.588155 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.588252 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/abf360a5-e982-4b3e-a814-511d57e9073f-metrics-tls\") pod \"dns-operator-744455d44c-mts5h\" (UID: \"abf360a5-e982-4b3e-a814-511d57e9073f\") " pod="openshift-dns-operator/dns-operator-744455d44c-mts5h" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.588409 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8xvkk"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.588542 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-nqhrk"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.588592 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-audit-policies\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.588731 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3bb818a6-b7dd-4e6a-b767-394bca081222-audit-policies\") pod 
\"apiserver-7bbb656c7d-fvbq2\" (UID: \"3bb818a6-b7dd-4e6a-b767-394bca081222\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.588762 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3bb818a6-b7dd-4e6a-b767-394bca081222-encryption-config\") pod \"apiserver-7bbb656c7d-fvbq2\" (UID: \"3bb818a6-b7dd-4e6a-b767-394bca081222\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.588896 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/544ec176-92f4-4374-ae80-2ed8717172d1-machine-approver-tls\") pod \"machine-approver-56656f9798-vzmxn\" (UID: \"544ec176-92f4-4374-ae80-2ed8717172d1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vzmxn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.589767 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a34c5792-5895-4d08-9e7e-b3948f5be096-config\") pod \"machine-api-operator-5694c8668f-9fjv2\" (UID: \"a34c5792-5895-4d08-9e7e-b3948f5be096\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9fjv2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.590938 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c4d3118a-28d4-403b-95c3-f2a11c14846d-serving-cert\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.591561 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/a34c5792-5895-4d08-9e7e-b3948f5be096-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-9fjv2\" (UID: \"a34c5792-5895-4d08-9e7e-b3948f5be096\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9fjv2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.591734 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c4d3118a-28d4-403b-95c3-f2a11c14846d-trusted-ca-bundle\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.592037 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/46ee42c1-592d-47c3-85ba-ead60edf7aca-console-oauth-config\") pod \"console-f9d7485db-zd8kn\" (UID: \"46ee42c1-592d-47c3-85ba-ead60edf7aca\") " pod="openshift-console/console-f9d7485db-zd8kn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.592275 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cb71375d-01dd-442b-ac48-a7f26ccde85d-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-mcrbn\" (UID: \"cb71375d-01dd-442b-ac48-a7f26ccde85d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcrbn" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.592329 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ghh7f"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.592458 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0dfe66dc-684e-49ba-932e-11e3d0eff5b0-trusted-ca\") pod \"console-operator-58897d9998-j5cvd\" (UID: \"0dfe66dc-684e-49ba-932e-11e3d0eff5b0\") " pod="openshift-console-operator/console-operator-58897d9998-j5cvd" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.593232 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/c4d3118a-28d4-403b-95c3-f2a11c14846d-encryption-config\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.593352 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-j5cvd"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.593429 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.594083 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.598455 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.598654 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/3a0d22e9-2f9a-4a91-85b8-7ad55bff4f46-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-mwrf4\" (UID: \"3a0d22e9-2f9a-4a91-85b8-7ad55bff4f46\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mwrf4" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.604582 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-c2mlc"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.604883 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-mts5h"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.605961 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-gldbt"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.606964 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501700-w7xxc"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.608218 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-zd8kn"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.609286 
4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8xpd7"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.610292 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-mq55f"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.611500 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-xvf6j"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.612866 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-b9npd"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.613572 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j5xtp"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.614772 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xcbl9"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.615846 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-htktd"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.617099 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-htktd" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.617128 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-btjfc"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.617383 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.618320 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-btjfc" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.618343 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mkbsf"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.619770 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dwg5b"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.620615 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-2wrb6"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.621631 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-jfpkc"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.622686 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-t2lvz"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.623706 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qwdnv"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.624731 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-p5qw9"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.626003 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-btjfc"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.626609 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/c4d3118a-28d4-403b-95c3-f2a11c14846d-audit\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.627384 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1edd916-a3bf-4331-abba-d5c8753d4377-config\") pod \"controller-manager-879f6c89f-xpdpr\" (UID: \"d1edd916-a3bf-4331-abba-d5c8753d4377\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xpdpr" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.627928 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-5tffj"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.629603 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-l76sw"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.630936 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-htktd"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.632416 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-h9vhd"] Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.633346 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-h9vhd" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.644032 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.657564 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.674537 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6fqj\" (UniqueName: \"kubernetes.io/projected/ead3a61c-4b09-4f98-866a-1e66ed92d084-kube-api-access-p6fqj\") pod \"collect-profiles-29501700-w7xxc\" (UID: \"ead3a61c-4b09-4f98-866a-1e66ed92d084\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-w7xxc" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.674568 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d0028e2-24a1-479d-805d-9ac66cfdd68a-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xcbl9\" (UID: \"6d0028e2-24a1-479d-805d-9ac66cfdd68a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xcbl9" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.674588 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/12242d4a-0312-44a6-8283-9794875efda5-proxy-tls\") pod \"machine-config-operator-74547568cd-xvf6j\" (UID: \"12242d4a-0312-44a6-8283-9794875efda5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xvf6j" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.674606 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/df8a6753-1332-4db6-b738-020474d60851-cert\") pod \"ingress-canary-5tffj\" (UID: \"df8a6753-1332-4db6-b738-020474d60851\") " pod="openshift-ingress-canary/ingress-canary-5tffj" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.674624 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f52e0f35-b2d7-40ed-8e44-3c4408657eb0-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8xpd7\" (UID: \"f52e0f35-b2d7-40ed-8e44-3c4408657eb0\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8xpd7" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.674640 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/12242d4a-0312-44a6-8283-9794875efda5-images\") pod \"machine-config-operator-74547568cd-xvf6j\" (UID: \"12242d4a-0312-44a6-8283-9794875efda5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xvf6j" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.674659 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d24b4086-5d15-4100-b580-9d9b69aa7602-serving-cert\") pod \"etcd-operator-b45778765-mq55f\" (UID: \"d24b4086-5d15-4100-b580-9d9b69aa7602\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq55f" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.674682 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/d24b4086-5d15-4100-b580-9d9b69aa7602-config\") pod \"etcd-operator-b45778765-mq55f\" (UID: \"d24b4086-5d15-4100-b580-9d9b69aa7602\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq55f" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.674709 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f52e0f35-b2d7-40ed-8e44-3c4408657eb0-config\") pod \"kube-controller-manager-operator-78b949d7b-8xpd7\" (UID: \"f52e0f35-b2d7-40ed-8e44-3c4408657eb0\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8xpd7" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.674734 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2ad9206f-443a-4f4b-820f-f017581bb341-proxy-tls\") pod \"machine-config-controller-84d6567774-gldbt\" (UID: \"2ad9206f-443a-4f4b-820f-f017581bb341\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-gldbt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.674833 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lw45p\" (UniqueName: \"kubernetes.io/projected/a1054514-9df0-4244-938c-9c6430b8183b-kube-api-access-lw45p\") pod \"openshift-controller-manager-operator-756b6f6bc6-vtsww\" (UID: \"a1054514-9df0-4244-938c-9c6430b8183b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vtsww" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.674878 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0a2f3fb0-fc4d-49fe-a84f-9bfbc39280ea-srv-cert\") pod \"olm-operator-6b444d44fb-mkbsf\" (UID: \"0a2f3fb0-fc4d-49fe-a84f-9bfbc39280ea\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mkbsf" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.674911 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/d24b4086-5d15-4100-b580-9d9b69aa7602-etcd-ca\") pod \"etcd-operator-b45778765-mq55f\" (UID: \"d24b4086-5d15-4100-b580-9d9b69aa7602\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq55f" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.674938 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d24b4086-5d15-4100-b580-9d9b69aa7602-etcd-client\") pod \"etcd-operator-b45778765-mq55f\" (UID: \"d24b4086-5d15-4100-b580-9d9b69aa7602\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq55f" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.674969 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/f8f529d7-932a-4047-b603-f84e03fe6898-stats-auth\") pod \"router-default-5444994796-zrxh2\" (UID: \"f8f529d7-932a-4047-b603-f84e03fe6898\") " pod="openshift-ingress/router-default-5444994796-zrxh2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.675004 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/69843c19-ead2-4248-a016-c29c58ddddc3-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-ghh7f\" (UID: \"69843c19-ead2-4248-a016-c29c58ddddc3\") " 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ghh7f" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.675031 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f8f529d7-932a-4047-b603-f84e03fe6898-service-ca-bundle\") pod \"router-default-5444994796-zrxh2\" (UID: \"f8f529d7-932a-4047-b603-f84e03fe6898\") " pod="openshift-ingress/router-default-5444994796-zrxh2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.675066 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/69843c19-ead2-4248-a016-c29c58ddddc3-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-ghh7f\" (UID: \"69843c19-ead2-4248-a016-c29c58ddddc3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ghh7f" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.675092 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/12242d4a-0312-44a6-8283-9794875efda5-auth-proxy-config\") pod \"machine-config-operator-74547568cd-xvf6j\" (UID: \"12242d4a-0312-44a6-8283-9794875efda5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xvf6j" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.675118 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zn6g8\" (UniqueName: \"kubernetes.io/projected/2ad9206f-443a-4f4b-820f-f017581bb341-kube-api-access-zn6g8\") pod \"machine-config-controller-84d6567774-gldbt\" (UID: \"2ad9206f-443a-4f4b-820f-f017581bb341\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-gldbt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.675146 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8xhk\" (UniqueName: \"kubernetes.io/projected/69843c19-ead2-4248-a016-c29c58ddddc3-kube-api-access-v8xhk\") pod \"cluster-image-registry-operator-dc59b4c8b-ghh7f\" (UID: \"69843c19-ead2-4248-a016-c29c58ddddc3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ghh7f" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.675303 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ead3a61c-4b09-4f98-866a-1e66ed92d084-secret-volume\") pod \"collect-profiles-29501700-w7xxc\" (UID: \"ead3a61c-4b09-4f98-866a-1e66ed92d084\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-w7xxc" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.675359 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a1054514-9df0-4244-938c-9c6430b8183b-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-vtsww\" (UID: \"a1054514-9df0-4244-938c-9c6430b8183b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vtsww" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.675389 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0a2f3fb0-fc4d-49fe-a84f-9bfbc39280ea-profile-collector-cert\") pod \"olm-operator-6b444d44fb-mkbsf\" (UID: \"0a2f3fb0-fc4d-49fe-a84f-9bfbc39280ea\") " 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mkbsf" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.675416 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/d24b4086-5d15-4100-b580-9d9b69aa7602-etcd-service-ca\") pod \"etcd-operator-b45778765-mq55f\" (UID: \"d24b4086-5d15-4100-b580-9d9b69aa7602\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq55f" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.675441 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pts46\" (UniqueName: \"kubernetes.io/projected/f8f529d7-932a-4047-b603-f84e03fe6898-kube-api-access-pts46\") pod \"router-default-5444994796-zrxh2\" (UID: \"f8f529d7-932a-4047-b603-f84e03fe6898\") " pod="openshift-ingress/router-default-5444994796-zrxh2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.675473 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/69843c19-ead2-4248-a016-c29c58ddddc3-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-ghh7f\" (UID: \"69843c19-ead2-4248-a016-c29c58ddddc3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ghh7f" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.675500 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xkcq\" (UniqueName: \"kubernetes.io/projected/12242d4a-0312-44a6-8283-9794875efda5-kube-api-access-4xkcq\") pod \"machine-config-operator-74547568cd-xvf6j\" (UID: \"12242d4a-0312-44a6-8283-9794875efda5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xvf6j" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.675543 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f52e0f35-b2d7-40ed-8e44-3c4408657eb0-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8xpd7\" (UID: \"f52e0f35-b2d7-40ed-8e44-3c4408657eb0\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8xpd7" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.675569 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a1054514-9df0-4244-938c-9c6430b8183b-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-vtsww\" (UID: \"a1054514-9df0-4244-938c-9c6430b8183b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vtsww" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.675657 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfkf6\" (UniqueName: \"kubernetes.io/projected/7ec92dcf-aef5-49fd-9d97-ccf3c79decd8-kube-api-access-tfkf6\") pod \"ingress-operator-5b745b69d9-b9npd\" (UID: \"7ec92dcf-aef5-49fd-9d97-ccf3c79decd8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b9npd" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.675909 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7ec92dcf-aef5-49fd-9d97-ccf3c79decd8-trusted-ca\") pod \"ingress-operator-5b745b69d9-b9npd\" (UID: \"7ec92dcf-aef5-49fd-9d97-ccf3c79decd8\") " 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b9npd" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.675946 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/7ec92dcf-aef5-49fd-9d97-ccf3c79decd8-metrics-tls\") pod \"ingress-operator-5b745b69d9-b9npd\" (UID: \"7ec92dcf-aef5-49fd-9d97-ccf3c79decd8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b9npd" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.675999 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ead3a61c-4b09-4f98-866a-1e66ed92d084-config-volume\") pod \"collect-profiles-29501700-w7xxc\" (UID: \"ead3a61c-4b09-4f98-866a-1e66ed92d084\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-w7xxc" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.676123 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2ad9206f-443a-4f4b-820f-f017581bb341-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-gldbt\" (UID: \"2ad9206f-443a-4f4b-820f-f017581bb341\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-gldbt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.676206 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgpbd\" (UniqueName: \"kubernetes.io/projected/f6237c57-eb31-40c5-8b6c-75a77a58ccdb-kube-api-access-sgpbd\") pod \"migrator-59844c95c7-5c2p2\" (UID: \"f6237c57-eb31-40c5-8b6c-75a77a58ccdb\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5c2p2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.676238 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7ec92dcf-aef5-49fd-9d97-ccf3c79decd8-bound-sa-token\") pod \"ingress-operator-5b745b69d9-b9npd\" (UID: \"7ec92dcf-aef5-49fd-9d97-ccf3c79decd8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b9npd" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.676266 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q47rx\" (UniqueName: \"kubernetes.io/projected/d24b4086-5d15-4100-b580-9d9b69aa7602-kube-api-access-q47rx\") pod \"etcd-operator-b45778765-mq55f\" (UID: \"d24b4086-5d15-4100-b580-9d9b69aa7602\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq55f" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.676285 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/12242d4a-0312-44a6-8283-9794875efda5-auth-proxy-config\") pod \"machine-config-operator-74547568cd-xvf6j\" (UID: \"12242d4a-0312-44a6-8283-9794875efda5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xvf6j" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.676293 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7587g\" (UniqueName: \"kubernetes.io/projected/df8a6753-1332-4db6-b738-020474d60851-kube-api-access-7587g\") pod \"ingress-canary-5tffj\" (UID: \"df8a6753-1332-4db6-b738-020474d60851\") " pod="openshift-ingress-canary/ingress-canary-5tffj" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.676354 4708 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6d0028e2-24a1-479d-805d-9ac66cfdd68a-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xcbl9\" (UID: \"6d0028e2-24a1-479d-805d-9ac66cfdd68a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xcbl9" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.676392 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6d0028e2-24a1-479d-805d-9ac66cfdd68a-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xcbl9\" (UID: \"6d0028e2-24a1-479d-805d-9ac66cfdd68a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xcbl9" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.676459 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/f8f529d7-932a-4047-b603-f84e03fe6898-default-certificate\") pod \"router-default-5444994796-zrxh2\" (UID: \"f8f529d7-932a-4047-b603-f84e03fe6898\") " pod="openshift-ingress/router-default-5444994796-zrxh2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.676495 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f8f529d7-932a-4047-b603-f84e03fe6898-metrics-certs\") pod \"router-default-5444994796-zrxh2\" (UID: \"f8f529d7-932a-4047-b603-f84e03fe6898\") " pod="openshift-ingress/router-default-5444994796-zrxh2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.676526 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lr8nt\" (UniqueName: \"kubernetes.io/projected/0a2f3fb0-fc4d-49fe-a84f-9bfbc39280ea-kube-api-access-lr8nt\") pod \"olm-operator-6b444d44fb-mkbsf\" (UID: \"0a2f3fb0-fc4d-49fe-a84f-9bfbc39280ea\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mkbsf" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.676585 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a1054514-9df0-4244-938c-9c6430b8183b-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-vtsww\" (UID: \"a1054514-9df0-4244-938c-9c6430b8183b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vtsww" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.676834 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/69843c19-ead2-4248-a016-c29c58ddddc3-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-ghh7f\" (UID: \"69843c19-ead2-4248-a016-c29c58ddddc3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ghh7f" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.677240 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7ec92dcf-aef5-49fd-9d97-ccf3c79decd8-trusted-ca\") pod \"ingress-operator-5b745b69d9-b9npd\" (UID: \"7ec92dcf-aef5-49fd-9d97-ccf3c79decd8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b9npd" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.677763 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/2ad9206f-443a-4f4b-820f-f017581bb341-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-gldbt\" (UID: \"2ad9206f-443a-4f4b-820f-f017581bb341\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-gldbt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.678599 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.679672 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/69843c19-ead2-4248-a016-c29c58ddddc3-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-ghh7f\" (UID: \"69843c19-ead2-4248-a016-c29c58ddddc3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ghh7f" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.679759 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a1054514-9df0-4244-938c-9c6430b8183b-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-vtsww\" (UID: \"a1054514-9df0-4244-938c-9c6430b8183b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vtsww" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.698248 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.718822 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.737504 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.758049 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.772175 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/7ec92dcf-aef5-49fd-9d97-ccf3c79decd8-metrics-tls\") pod \"ingress-operator-5b745b69d9-b9npd\" (UID: \"7ec92dcf-aef5-49fd-9d97-ccf3c79decd8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b9npd" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.779251 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.797850 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.817751 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.837293 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.850669 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/6d0028e2-24a1-479d-805d-9ac66cfdd68a-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xcbl9\" (UID: \"6d0028e2-24a1-479d-805d-9ac66cfdd68a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xcbl9" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.858100 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.866403 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d0028e2-24a1-479d-805d-9ac66cfdd68a-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xcbl9\" (UID: \"6d0028e2-24a1-479d-805d-9ac66cfdd68a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xcbl9" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.878577 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.897895 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.912959 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/f8f529d7-932a-4047-b603-f84e03fe6898-default-certificate\") pod \"router-default-5444994796-zrxh2\" (UID: \"f8f529d7-932a-4047-b603-f84e03fe6898\") " pod="openshift-ingress/router-default-5444994796-zrxh2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.918171 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.938174 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.951695 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f8f529d7-932a-4047-b603-f84e03fe6898-metrics-certs\") pod \"router-default-5444994796-zrxh2\" (UID: \"f8f529d7-932a-4047-b603-f84e03fe6898\") " pod="openshift-ingress/router-default-5444994796-zrxh2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.958143 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.966851 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f8f529d7-932a-4047-b603-f84e03fe6898-service-ca-bundle\") pod \"router-default-5444994796-zrxh2\" (UID: \"f8f529d7-932a-4047-b603-f84e03fe6898\") " pod="openshift-ingress/router-default-5444994796-zrxh2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.978430 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.990538 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/f8f529d7-932a-4047-b603-f84e03fe6898-stats-auth\") pod \"router-default-5444994796-zrxh2\" (UID: \"f8f529d7-932a-4047-b603-f84e03fe6898\") " 
pod="openshift-ingress/router-default-5444994796-zrxh2" Feb 03 07:12:42 crc kubenswrapper[4708]: I0203 07:12:42.998941 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.018017 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.037908 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.058609 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.078638 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.090755 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f52e0f35-b2d7-40ed-8e44-3c4408657eb0-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8xpd7\" (UID: \"f52e0f35-b2d7-40ed-8e44-3c4408657eb0\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8xpd7" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.099124 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.106657 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f52e0f35-b2d7-40ed-8e44-3c4408657eb0-config\") pod \"kube-controller-manager-operator-78b949d7b-8xpd7\" (UID: \"f52e0f35-b2d7-40ed-8e44-3c4408657eb0\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8xpd7" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.119918 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.130086 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2ad9206f-443a-4f4b-820f-f017581bb341-proxy-tls\") pod \"machine-config-controller-84d6567774-gldbt\" (UID: \"2ad9206f-443a-4f4b-820f-f017581bb341\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-gldbt" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.137979 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.157861 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.178760 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.186262 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: 
\"kubernetes.io/configmap/12242d4a-0312-44a6-8283-9794875efda5-images\") pod \"machine-config-operator-74547568cd-xvf6j\" (UID: \"12242d4a-0312-44a6-8283-9794875efda5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xvf6j" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.198759 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.211000 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/12242d4a-0312-44a6-8283-9794875efda5-proxy-tls\") pod \"machine-config-operator-74547568cd-xvf6j\" (UID: \"12242d4a-0312-44a6-8283-9794875efda5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xvf6j" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.218518 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.238552 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.260650 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.278567 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.299629 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.310100 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d24b4086-5d15-4100-b580-9d9b69aa7602-etcd-client\") pod \"etcd-operator-b45778765-mq55f\" (UID: \"d24b4086-5d15-4100-b580-9d9b69aa7602\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq55f" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.318101 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.325695 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/d24b4086-5d15-4100-b580-9d9b69aa7602-etcd-ca\") pod \"etcd-operator-b45778765-mq55f\" (UID: \"d24b4086-5d15-4100-b580-9d9b69aa7602\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq55f" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.337949 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.345680 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d24b4086-5d15-4100-b580-9d9b69aa7602-config\") pod \"etcd-operator-b45778765-mq55f\" (UID: \"d24b4086-5d15-4100-b580-9d9b69aa7602\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq55f" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.358486 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Feb 03 07:12:43 crc 
kubenswrapper[4708]: I0203 07:12:43.378511 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.398467 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.406563 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/d24b4086-5d15-4100-b580-9d9b69aa7602-etcd-service-ca\") pod \"etcd-operator-b45778765-mq55f\" (UID: \"d24b4086-5d15-4100-b580-9d9b69aa7602\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq55f" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.418626 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.428757 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d24b4086-5d15-4100-b580-9d9b69aa7602-serving-cert\") pod \"etcd-operator-b45778765-mq55f\" (UID: \"d24b4086-5d15-4100-b580-9d9b69aa7602\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq55f" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.438344 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.459325 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.478720 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.498746 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.518106 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.539257 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.556994 4708 request.go:700] Waited for 1.018712454s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-storage-version-migrator-operator/secrets?fieldSelector=metadata.name%3Dserving-cert&limit=500&resourceVersion=0 Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.558709 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.578185 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.598255 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Feb 03 07:12:43 crc 
kubenswrapper[4708]: I0203 07:12:43.618076 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.628726 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0a2f3fb0-fc4d-49fe-a84f-9bfbc39280ea-profile-collector-cert\") pod \"olm-operator-6b444d44fb-mkbsf\" (UID: \"0a2f3fb0-fc4d-49fe-a84f-9bfbc39280ea\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mkbsf" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.630879 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ead3a61c-4b09-4f98-866a-1e66ed92d084-secret-volume\") pod \"collect-profiles-29501700-w7xxc\" (UID: \"ead3a61c-4b09-4f98-866a-1e66ed92d084\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-w7xxc" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.639068 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.649765 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0a2f3fb0-fc4d-49fe-a84f-9bfbc39280ea-srv-cert\") pod \"olm-operator-6b444d44fb-mkbsf\" (UID: \"0a2f3fb0-fc4d-49fe-a84f-9bfbc39280ea\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mkbsf" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.658238 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Feb 03 07:12:43 crc kubenswrapper[4708]: E0203 07:12:43.674911 4708 secret.go:188] Couldn't get secret openshift-ingress-canary/canary-serving-cert: failed to sync secret cache: timed out waiting for the condition Feb 03 07:12:43 crc kubenswrapper[4708]: E0203 07:12:43.675002 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/df8a6753-1332-4db6-b738-020474d60851-cert podName:df8a6753-1332-4db6-b738-020474d60851 nodeName:}" failed. No retries permitted until 2026-02-03 07:12:44.174980728 +0000 UTC m=+143.156927535 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/df8a6753-1332-4db6-b738-020474d60851-cert") pod "ingress-canary-5tffj" (UID: "df8a6753-1332-4db6-b738-020474d60851") : failed to sync secret cache: timed out waiting for the condition Feb 03 07:12:43 crc kubenswrapper[4708]: E0203 07:12:43.677562 4708 configmap.go:193] Couldn't get configMap openshift-operator-lifecycle-manager/collect-profiles-config: failed to sync configmap cache: timed out waiting for the condition Feb 03 07:12:43 crc kubenswrapper[4708]: E0203 07:12:43.677705 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ead3a61c-4b09-4f98-866a-1e66ed92d084-config-volume podName:ead3a61c-4b09-4f98-866a-1e66ed92d084 nodeName:}" failed. No retries permitted until 2026-02-03 07:12:44.177644395 +0000 UTC m=+143.159591222 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-volume" (UniqueName: "kubernetes.io/configmap/ead3a61c-4b09-4f98-866a-1e66ed92d084-config-volume") pod "collect-profiles-29501700-w7xxc" (UID: "ead3a61c-4b09-4f98-866a-1e66ed92d084") : failed to sync configmap cache: timed out waiting for the condition Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.678123 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.698365 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.717547 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.737518 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.759121 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.799060 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.818974 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.839296 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.859128 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.879630 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.898285 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.918109 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.938441 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.958172 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Feb 03 07:12:43 crc kubenswrapper[4708]: I0203 07:12:43.979224 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.009628 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.018213 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.037845 4708 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.058713 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.077630 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.098584 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.119371 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.140033 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.159339 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.178914 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.199090 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.201628 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ead3a61c-4b09-4f98-866a-1e66ed92d084-config-volume\") pod \"collect-profiles-29501700-w7xxc\" (UID: \"ead3a61c-4b09-4f98-866a-1e66ed92d084\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-w7xxc" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.201896 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/df8a6753-1332-4db6-b738-020474d60851-cert\") pod \"ingress-canary-5tffj\" (UID: \"df8a6753-1332-4db6-b738-020474d60851\") " pod="openshift-ingress-canary/ingress-canary-5tffj" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.205031 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ead3a61c-4b09-4f98-866a-1e66ed92d084-config-volume\") pod \"collect-profiles-29501700-w7xxc\" (UID: \"ead3a61c-4b09-4f98-866a-1e66ed92d084\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-w7xxc" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.206979 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/df8a6753-1332-4db6-b738-020474d60851-cert\") pod \"ingress-canary-5tffj\" (UID: \"df8a6753-1332-4db6-b738-020474d60851\") " pod="openshift-ingress-canary/ingress-canary-5tffj" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.253002 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7cdmx\" (UniqueName: \"kubernetes.io/projected/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-kube-api-access-7cdmx\") pod \"oauth-openshift-558db77b4-lfvrw\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.285555 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rrkqj\" (UniqueName: \"kubernetes.io/projected/c4d3118a-28d4-403b-95c3-f2a11c14846d-kube-api-access-rrkqj\") pod \"apiserver-76f77b778f-qftz5\" (UID: \"c4d3118a-28d4-403b-95c3-f2a11c14846d\") " pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.296396 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sd46g\" (UniqueName: \"kubernetes.io/projected/a34c5792-5895-4d08-9e7e-b3948f5be096-kube-api-access-sd46g\") pod \"machine-api-operator-5694c8668f-9fjv2\" (UID: \"a34c5792-5895-4d08-9e7e-b3948f5be096\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9fjv2" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.314885 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-9fjv2" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.315399 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qs8bz\" (UniqueName: \"kubernetes.io/projected/d1edd916-a3bf-4331-abba-d5c8753d4377-kube-api-access-qs8bz\") pod \"controller-manager-879f6c89f-xpdpr\" (UID: \"d1edd916-a3bf-4331-abba-d5c8753d4377\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xpdpr" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.326275 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.336297 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-msn54\" (UniqueName: \"kubernetes.io/projected/0dfe66dc-684e-49ba-932e-11e3d0eff5b0-kube-api-access-msn54\") pod \"console-operator-58897d9998-j5cvd\" (UID: \"0dfe66dc-684e-49ba-932e-11e3d0eff5b0\") " pod="openshift-console-operator/console-operator-58897d9998-j5cvd" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.374689 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8246l\" (UniqueName: \"kubernetes.io/projected/46ee42c1-592d-47c3-85ba-ead60edf7aca-kube-api-access-8246l\") pod \"console-f9d7485db-zd8kn\" (UID: \"46ee42c1-592d-47c3-85ba-ead60edf7aca\") " pod="openshift-console/console-f9d7485db-zd8kn" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.378424 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhqph\" (UniqueName: \"kubernetes.io/projected/544ec176-92f4-4374-ae80-2ed8717172d1-kube-api-access-zhqph\") pod \"machine-approver-56656f9798-vzmxn\" (UID: \"544ec176-92f4-4374-ae80-2ed8717172d1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vzmxn" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.395644 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clxjq\" (UniqueName: \"kubernetes.io/projected/abf360a5-e982-4b3e-a814-511d57e9073f-kube-api-access-clxjq\") pod \"dns-operator-744455d44c-mts5h\" (UID: \"abf360a5-e982-4b3e-a814-511d57e9073f\") " pod="openshift-dns-operator/dns-operator-744455d44c-mts5h" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.416162 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fc5qg\" (UniqueName: 
\"kubernetes.io/projected/cb71375d-01dd-442b-ac48-a7f26ccde85d-kube-api-access-fc5qg\") pod \"openshift-apiserver-operator-796bbdcf4f-mcrbn\" (UID: \"cb71375d-01dd-442b-ac48-a7f26ccde85d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcrbn" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.423304 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-zd8kn" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.429989 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-j5cvd" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.433757 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkldd\" (UniqueName: \"kubernetes.io/projected/b20ffaa1-a31e-451e-89a3-5e36287a0c5b-kube-api-access-qkldd\") pod \"authentication-operator-69f744f599-ds4j6\" (UID: \"b20ffaa1-a31e-451e-89a3-5e36287a0c5b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ds4j6" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.454704 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tj2mp\" (UniqueName: \"kubernetes.io/projected/3a0d22e9-2f9a-4a91-85b8-7ad55bff4f46-kube-api-access-tj2mp\") pod \"cluster-samples-operator-665b6dd947-mwrf4\" (UID: \"3a0d22e9-2f9a-4a91-85b8-7ad55bff4f46\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mwrf4" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.472605 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4dnt\" (UniqueName: \"kubernetes.io/projected/283462c1-8f31-40aa-b570-96fff19ff3d0-kube-api-access-q4dnt\") pod \"openshift-config-operator-7777fb866f-tdn7n\" (UID: \"283462c1-8f31-40aa-b570-96fff19ff3d0\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tdn7n" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.474636 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-xpdpr" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.493099 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfwxc\" (UniqueName: \"kubernetes.io/projected/3bb818a6-b7dd-4e6a-b767-394bca081222-kube-api-access-cfwxc\") pod \"apiserver-7bbb656c7d-fvbq2\" (UID: \"3bb818a6-b7dd-4e6a-b767-394bca081222\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.495110 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vzmxn" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.515489 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfxxg\" (UniqueName: \"kubernetes.io/projected/7ee20271-e2ce-4476-a011-5e00e19126bf-kube-api-access-zfxxg\") pod \"downloads-7954f5f757-c2mlc\" (UID: \"7ee20271-e2ce-4476-a011-5e00e19126bf\") " pod="openshift-console/downloads-7954f5f757-c2mlc" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.517683 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.537951 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.548569 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-tdn7n" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.557238 4708 request.go:700] Waited for 1.939747102s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-dns/configmaps?fieldSelector=metadata.name%3Ddns-default&limit=500&resourceVersion=0 Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.566884 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.572470 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lfvrw"] Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.578678 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.580340 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:44 crc kubenswrapper[4708]: W0203 07:12:44.590585 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod242d8dcd_b5fb_4599_9fe7_e83b7e81ffc6.slice/crio-d00d58bad66725c312925cbecaa349f376c190402d714470a9a618153e2081cd WatchSource:0}: Error finding container d00d58bad66725c312925cbecaa349f376c190402d714470a9a618153e2081cd: Status 404 returned error can't find the container with id d00d58bad66725c312925cbecaa349f376c190402d714470a9a618153e2081cd Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.598646 4708 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.601570 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-ds4j6" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.601878 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-9fjv2"] Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.618023 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.632644 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcrbn" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.638455 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.648031 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-mts5h" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.655398 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-zd8kn"] Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.658404 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mwrf4" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.659296 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.670112 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.680936 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.699029 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-j5cvd"] Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.722087 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-c2mlc" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.726321 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lw45p\" (UniqueName: \"kubernetes.io/projected/a1054514-9df0-4244-938c-9c6430b8183b-kube-api-access-lw45p\") pod \"openshift-controller-manager-operator-756b6f6bc6-vtsww\" (UID: \"a1054514-9df0-4244-938c-9c6430b8183b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vtsww" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.741504 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vtsww" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.742516 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/69843c19-ead2-4248-a016-c29c58ddddc3-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-ghh7f\" (UID: \"69843c19-ead2-4248-a016-c29c58ddddc3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ghh7f" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.754280 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zn6g8\" (UniqueName: \"kubernetes.io/projected/2ad9206f-443a-4f4b-820f-f017581bb341-kube-api-access-zn6g8\") pod \"machine-config-controller-84d6567774-gldbt\" (UID: \"2ad9206f-443a-4f4b-820f-f017581bb341\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-gldbt" Feb 03 07:12:44 crc kubenswrapper[4708]: W0203 07:12:44.766333 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0dfe66dc_684e_49ba_932e_11e3d0eff5b0.slice/crio-d76d01776f6a8db065a3be0eb9cdddc611507de09ed38105ce6e884c11144cba WatchSource:0}: Error finding container d76d01776f6a8db065a3be0eb9cdddc611507de09ed38105ce6e884c11144cba: Status 404 returned error can't find the container with id d76d01776f6a8db065a3be0eb9cdddc611507de09ed38105ce6e884c11144cba Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.781513 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8xhk\" (UniqueName: \"kubernetes.io/projected/69843c19-ead2-4248-a016-c29c58ddddc3-kube-api-access-v8xhk\") pod \"cluster-image-registry-operator-dc59b4c8b-ghh7f\" (UID: \"69843c19-ead2-4248-a016-c29c58ddddc3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ghh7f" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.795057 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pts46\" (UniqueName: \"kubernetes.io/projected/f8f529d7-932a-4047-b603-f84e03fe6898-kube-api-access-pts46\") pod \"router-default-5444994796-zrxh2\" (UID: \"f8f529d7-932a-4047-b603-f84e03fe6898\") " pod="openshift-ingress/router-default-5444994796-zrxh2" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.804418 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-gldbt" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.813996 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-tdn7n"] Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.816199 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xkcq\" (UniqueName: \"kubernetes.io/projected/12242d4a-0312-44a6-8283-9794875efda5-kube-api-access-4xkcq\") pod \"machine-config-operator-74547568cd-xvf6j\" (UID: \"12242d4a-0312-44a6-8283-9794875efda5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xvf6j" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.818404 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xpdpr"] Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.845602 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f52e0f35-b2d7-40ed-8e44-3c4408657eb0-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8xpd7\" (UID: \"f52e0f35-b2d7-40ed-8e44-3c4408657eb0\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8xpd7" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.858317 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-qftz5"] Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.859446 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6fqj\" (UniqueName: \"kubernetes.io/projected/ead3a61c-4b09-4f98-866a-1e66ed92d084-kube-api-access-p6fqj\") pod \"collect-profiles-29501700-w7xxc\" (UID: \"ead3a61c-4b09-4f98-866a-1e66ed92d084\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-w7xxc" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.873972 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-j5cvd" event={"ID":"0dfe66dc-684e-49ba-932e-11e3d0eff5b0","Type":"ContainerStarted","Data":"d76d01776f6a8db065a3be0eb9cdddc611507de09ed38105ce6e884c11144cba"} Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.876245 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-9fjv2" event={"ID":"a34c5792-5895-4d08-9e7e-b3948f5be096","Type":"ContainerStarted","Data":"688314cbd2fef9cb11b5cc5697b27fe6fc1657f98ad4e715bf17d9260e7d6e20"} Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.877987 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" event={"ID":"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6","Type":"ContainerStarted","Data":"d00d58bad66725c312925cbecaa349f376c190402d714470a9a618153e2081cd"} Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.880052 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-zd8kn" event={"ID":"46ee42c1-592d-47c3-85ba-ead60edf7aca","Type":"ContainerStarted","Data":"b10290eea3eea1ee6ce16abf81570cc0e941ff5723c86c14615959e660bb82cd"} Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.881770 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vzmxn" 
event={"ID":"544ec176-92f4-4374-ae80-2ed8717172d1","Type":"ContainerStarted","Data":"cec18753fd8eb08478839777c270437bf50f5b43845d975cfd996efc2638a557"} Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.882825 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lr8nt\" (UniqueName: \"kubernetes.io/projected/0a2f3fb0-fc4d-49fe-a84f-9bfbc39280ea-kube-api-access-lr8nt\") pod \"olm-operator-6b444d44fb-mkbsf\" (UID: \"0a2f3fb0-fc4d-49fe-a84f-9bfbc39280ea\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mkbsf" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.896934 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7ec92dcf-aef5-49fd-9d97-ccf3c79decd8-bound-sa-token\") pod \"ingress-operator-5b745b69d9-b9npd\" (UID: \"7ec92dcf-aef5-49fd-9d97-ccf3c79decd8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b9npd" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.916507 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q47rx\" (UniqueName: \"kubernetes.io/projected/d24b4086-5d15-4100-b580-9d9b69aa7602-kube-api-access-q47rx\") pod \"etcd-operator-b45778765-mq55f\" (UID: \"d24b4086-5d15-4100-b580-9d9b69aa7602\") " pod="openshift-etcd-operator/etcd-operator-b45778765-mq55f" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.957605 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6d0028e2-24a1-479d-805d-9ac66cfdd68a-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-xcbl9\" (UID: \"6d0028e2-24a1-479d-805d-9ac66cfdd68a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xcbl9" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.974450 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgpbd\" (UniqueName: \"kubernetes.io/projected/f6237c57-eb31-40c5-8b6c-75a77a58ccdb-kube-api-access-sgpbd\") pod \"migrator-59844c95c7-5c2p2\" (UID: \"f6237c57-eb31-40c5-8b6c-75a77a58ccdb\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5c2p2" Feb 03 07:12:44 crc kubenswrapper[4708]: I0203 07:12:44.983194 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7587g\" (UniqueName: \"kubernetes.io/projected/df8a6753-1332-4db6-b738-020474d60851-kube-api-access-7587g\") pod \"ingress-canary-5tffj\" (UID: \"df8a6753-1332-4db6-b738-020474d60851\") " pod="openshift-ingress-canary/ingress-canary-5tffj" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.020097 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfkf6\" (UniqueName: \"kubernetes.io/projected/7ec92dcf-aef5-49fd-9d97-ccf3c79decd8-kube-api-access-tfkf6\") pod \"ingress-operator-5b745b69d9-b9npd\" (UID: \"7ec92dcf-aef5-49fd-9d97-ccf3c79decd8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b9npd" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.026279 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c35d1e2c-9135-4bff-a0af-cd20addc6134-installation-pull-secrets\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:45 crc 
kubenswrapper[4708]: I0203 07:12:45.026385 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c35d1e2c-9135-4bff-a0af-cd20addc6134-registry-certificates\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.026414 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5711cd35-18b6-4bbb-80a2-485ab5dbdda4-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-8xvkk\" (UID: \"5711cd35-18b6-4bbb-80a2-485ab5dbdda4\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8xvkk" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.026506 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5711cd35-18b6-4bbb-80a2-485ab5dbdda4-config\") pod \"kube-apiserver-operator-766d6c64bb-8xvkk\" (UID: \"5711cd35-18b6-4bbb-80a2-485ab5dbdda4\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8xvkk" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.026578 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/987304c9-45fa-40ab-a687-528d1e8f69d3-config\") pod \"route-controller-manager-6576b87f9c-748cf\" (UID: \"987304c9-45fa-40ab-a687-528d1e8f69d3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-748cf" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.026602 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f4facedc-7187-46c6-b930-27c347e61b05-apiservice-cert\") pod \"packageserver-d55dfcdfc-v447t\" (UID: \"f4facedc-7187-46c6-b930-27c347e61b05\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-v447t" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.026676 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-ds4j6"] Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.026694 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d8be75c-bac6-4a91-b9b9-19cf893ad193-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-j5xtp\" (UID: \"1d8be75c-bac6-4a91-b9b9-19cf893ad193\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j5xtp" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.027767 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c35d1e2c-9135-4bff-a0af-cd20addc6134-ca-trust-extracted\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.028812 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: 
\"kubernetes.io/projected/c35d1e2c-9135-4bff-a0af-cd20addc6134-registry-tls\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.028863 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c35d1e2c-9135-4bff-a0af-cd20addc6134-bound-sa-token\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.028887 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5sphg\" (UniqueName: \"kubernetes.io/projected/f4facedc-7187-46c6-b930-27c347e61b05-kube-api-access-5sphg\") pod \"packageserver-d55dfcdfc-v447t\" (UID: \"f4facedc-7187-46c6-b930-27c347e61b05\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-v447t" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.028924 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1d8be75c-bac6-4a91-b9b9-19cf893ad193-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-j5xtp\" (UID: \"1d8be75c-bac6-4a91-b9b9-19cf893ad193\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j5xtp" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.029018 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5711cd35-18b6-4bbb-80a2-485ab5dbdda4-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-8xvkk\" (UID: \"5711cd35-18b6-4bbb-80a2-485ab5dbdda4\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8xvkk" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.029151 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvz4w\" (UniqueName: \"kubernetes.io/projected/1d8be75c-bac6-4a91-b9b9-19cf893ad193-kube-api-access-bvz4w\") pod \"kube-storage-version-migrator-operator-b67b599dd-j5xtp\" (UID: \"1d8be75c-bac6-4a91-b9b9-19cf893ad193\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j5xtp" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.029192 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6ks9\" (UniqueName: \"kubernetes.io/projected/c35d1e2c-9135-4bff-a0af-cd20addc6134-kube-api-access-q6ks9\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.029266 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c35d1e2c-9135-4bff-a0af-cd20addc6134-trusted-ca\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.029322 4708 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/987304c9-45fa-40ab-a687-528d1e8f69d3-client-ca\") pod \"route-controller-manager-6576b87f9c-748cf\" (UID: \"987304c9-45fa-40ab-a687-528d1e8f69d3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-748cf" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.029389 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/987304c9-45fa-40ab-a687-528d1e8f69d3-serving-cert\") pod \"route-controller-manager-6576b87f9c-748cf\" (UID: \"987304c9-45fa-40ab-a687-528d1e8f69d3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-748cf" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.029418 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/f4facedc-7187-46c6-b930-27c347e61b05-tmpfs\") pod \"packageserver-d55dfcdfc-v447t\" (UID: \"f4facedc-7187-46c6-b930-27c347e61b05\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-v447t" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.029454 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.029493 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xzjb\" (UniqueName: \"kubernetes.io/projected/987304c9-45fa-40ab-a687-528d1e8f69d3-kube-api-access-4xzjb\") pod \"route-controller-manager-6576b87f9c-748cf\" (UID: \"987304c9-45fa-40ab-a687-528d1e8f69d3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-748cf" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.029527 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f4facedc-7187-46c6-b930-27c347e61b05-webhook-cert\") pod \"packageserver-d55dfcdfc-v447t\" (UID: \"f4facedc-7187-46c6-b930-27c347e61b05\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-v447t" Feb 03 07:12:45 crc kubenswrapper[4708]: E0203 07:12:45.029944 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:45.529929466 +0000 UTC m=+144.511876473 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.038866 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ghh7f" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.060886 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b9npd" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.075367 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xcbl9" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.098692 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8xpd7" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.097325 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-zrxh2" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.111327 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xvf6j" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.117517 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5c2p2" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.128900 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-mq55f" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.130116 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.130387 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvz4w\" (UniqueName: \"kubernetes.io/projected/1d8be75c-bac6-4a91-b9b9-19cf893ad193-kube-api-access-bvz4w\") pod \"kube-storage-version-migrator-operator-b67b599dd-j5xtp\" (UID: \"1d8be75c-bac6-4a91-b9b9-19cf893ad193\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j5xtp" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.130443 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q6ks9\" (UniqueName: \"kubernetes.io/projected/c35d1e2c-9135-4bff-a0af-cd20addc6134-kube-api-access-q6ks9\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.130501 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3764ba0a-05b2-442a-96ef-0534f4a1aca0-profile-collector-cert\") pod \"catalog-operator-68c6474976-qwdnv\" (UID: \"3764ba0a-05b2-442a-96ef-0534f4a1aca0\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qwdnv" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.130650 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/c35d1e2c-9135-4bff-a0af-cd20addc6134-trusted-ca\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.130701 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-449gp\" (UniqueName: \"kubernetes.io/projected/3764ba0a-05b2-442a-96ef-0534f4a1aca0-kube-api-access-449gp\") pod \"catalog-operator-68c6474976-qwdnv\" (UID: \"3764ba0a-05b2-442a-96ef-0534f4a1aca0\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qwdnv" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.130782 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/00c9d661-6c2e-48e7-9747-1476d52290a8-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-t2lvz\" (UID: \"00c9d661-6c2e-48e7-9747-1476d52290a8\") " pod="openshift-marketplace/marketplace-operator-79b997595-t2lvz" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.130837 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7cnsl\" (UniqueName: \"kubernetes.io/projected/00c9d661-6c2e-48e7-9747-1476d52290a8-kube-api-access-7cnsl\") pod \"marketplace-operator-79b997595-t2lvz\" (UID: \"00c9d661-6c2e-48e7-9747-1476d52290a8\") " pod="openshift-marketplace/marketplace-operator-79b997595-t2lvz" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.130865 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/987304c9-45fa-40ab-a687-528d1e8f69d3-client-ca\") pod \"route-controller-manager-6576b87f9c-748cf\" (UID: \"987304c9-45fa-40ab-a687-528d1e8f69d3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-748cf" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.130913 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnzxp\" (UniqueName: \"kubernetes.io/projected/915701ce-919a-4743-b390-fa72105516e1-kube-api-access-xnzxp\") pod \"control-plane-machine-set-operator-78cbb6b69f-p5qw9\" (UID: \"915701ce-919a-4743-b390-fa72105516e1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-p5qw9" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.130996 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/b81e6fc2-4131-40e0-994a-73435d2a4cbe-csi-data-dir\") pod \"csi-hostpathplugin-btjfc\" (UID: \"b81e6fc2-4131-40e0-994a-73435d2a4cbe\") " pod="hostpath-provisioner/csi-hostpathplugin-btjfc" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.131028 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9fp7f\" (UniqueName: \"kubernetes.io/projected/ed81cda7-7d47-476a-8a51-029a15af8417-kube-api-access-9fp7f\") pod \"service-ca-operator-777779d784-l76sw\" (UID: \"ed81cda7-7d47-476a-8a51-029a15af8417\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-l76sw" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.131078 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/915701ce-919a-4743-b390-fa72105516e1-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-p5qw9\" (UID: \"915701ce-919a-4743-b390-fa72105516e1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-p5qw9" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.131118 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/987304c9-45fa-40ab-a687-528d1e8f69d3-serving-cert\") pod \"route-controller-manager-6576b87f9c-748cf\" (UID: \"987304c9-45fa-40ab-a687-528d1e8f69d3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-748cf" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.131190 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/f4facedc-7187-46c6-b930-27c347e61b05-tmpfs\") pod \"packageserver-d55dfcdfc-v447t\" (UID: \"f4facedc-7187-46c6-b930-27c347e61b05\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-v447t" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.131273 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xzjb\" (UniqueName: \"kubernetes.io/projected/987304c9-45fa-40ab-a687-528d1e8f69d3-kube-api-access-4xzjb\") pod \"route-controller-manager-6576b87f9c-748cf\" (UID: \"987304c9-45fa-40ab-a687-528d1e8f69d3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-748cf" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.131318 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/07cf9cc6-a9ee-462a-811b-51051d221a8a-signing-key\") pod \"service-ca-9c57cc56f-2wrb6\" (UID: \"07cf9cc6-a9ee-462a-811b-51051d221a8a\") " pod="openshift-service-ca/service-ca-9c57cc56f-2wrb6" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.131388 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f4facedc-7187-46c6-b930-27c347e61b05-webhook-cert\") pod \"packageserver-d55dfcdfc-v447t\" (UID: \"f4facedc-7187-46c6-b930-27c347e61b05\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-v447t" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.133679 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c35d1e2c-9135-4bff-a0af-cd20addc6134-installation-pull-secrets\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.133756 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3764ba0a-05b2-442a-96ef-0534f4a1aca0-srv-cert\") pod \"catalog-operator-68c6474976-qwdnv\" (UID: \"3764ba0a-05b2-442a-96ef-0534f4a1aca0\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qwdnv" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.134001 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/ed81cda7-7d47-476a-8a51-029a15af8417-serving-cert\") pod \"service-ca-operator-777779d784-l76sw\" (UID: \"ed81cda7-7d47-476a-8a51-029a15af8417\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-l76sw" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.134037 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1131acfa-a9db-4b5b-9f84-77f92597f69c-metrics-tls\") pod \"dns-default-htktd\" (UID: \"1131acfa-a9db-4b5b-9f84-77f92597f69c\") " pod="openshift-dns/dns-default-htktd" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.134056 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdbtt\" (UniqueName: \"kubernetes.io/projected/07cf9cc6-a9ee-462a-811b-51051d221a8a-kube-api-access-jdbtt\") pod \"service-ca-9c57cc56f-2wrb6\" (UID: \"07cf9cc6-a9ee-462a-811b-51051d221a8a\") " pod="openshift-service-ca/service-ca-9c57cc56f-2wrb6" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.134089 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qx47d\" (UniqueName: \"kubernetes.io/projected/1131acfa-a9db-4b5b-9f84-77f92597f69c-kube-api-access-qx47d\") pod \"dns-default-htktd\" (UID: \"1131acfa-a9db-4b5b-9f84-77f92597f69c\") " pod="openshift-dns/dns-default-htktd" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.134129 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c35d1e2c-9135-4bff-a0af-cd20addc6134-registry-certificates\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.134459 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/f4facedc-7187-46c6-b930-27c347e61b05-tmpfs\") pod \"packageserver-d55dfcdfc-v447t\" (UID: \"f4facedc-7187-46c6-b930-27c347e61b05\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-v447t" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.135177 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/987304c9-45fa-40ab-a687-528d1e8f69d3-client-ca\") pod \"route-controller-manager-6576b87f9c-748cf\" (UID: \"987304c9-45fa-40ab-a687-528d1e8f69d3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-748cf" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.135575 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c35d1e2c-9135-4bff-a0af-cd20addc6134-trusted-ca\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.137744 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/7d731670-5f15-4d66-92bc-1e4d71645b91-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-dwg5b\" (UID: \"7d731670-5f15-4d66-92bc-1e4d71645b91\") " 
pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dwg5b" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.137775 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/00c9d661-6c2e-48e7-9747-1476d52290a8-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-t2lvz\" (UID: \"00c9d661-6c2e-48e7-9747-1476d52290a8\") " pod="openshift-marketplace/marketplace-operator-79b997595-t2lvz" Feb 03 07:12:45 crc kubenswrapper[4708]: E0203 07:12:45.137851 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:45.637833945 +0000 UTC m=+144.619780752 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.138071 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5711cd35-18b6-4bbb-80a2-485ab5dbdda4-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-8xvkk\" (UID: \"5711cd35-18b6-4bbb-80a2-485ab5dbdda4\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8xvkk" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.139703 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c35d1e2c-9135-4bff-a0af-cd20addc6134-registry-certificates\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.139774 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dtqx\" (UniqueName: \"kubernetes.io/projected/7d731670-5f15-4d66-92bc-1e4d71645b91-kube-api-access-7dtqx\") pod \"package-server-manager-789f6589d5-dwg5b\" (UID: \"7d731670-5f15-4d66-92bc-1e4d71645b91\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dwg5b" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.139842 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k57mr\" (UniqueName: \"kubernetes.io/projected/15a6fec8-b852-4ad7-96ee-d69af750e84d-kube-api-access-k57mr\") pod \"machine-config-server-h9vhd\" (UID: \"15a6fec8-b852-4ad7-96ee-d69af750e84d\") " pod="openshift-machine-config-operator/machine-config-server-h9vhd" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.139877 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/b81e6fc2-4131-40e0-994a-73435d2a4cbe-socket-dir\") pod \"csi-hostpathplugin-btjfc\" (UID: \"b81e6fc2-4131-40e0-994a-73435d2a4cbe\") " pod="hostpath-provisioner/csi-hostpathplugin-btjfc" Feb 03 
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.140749 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/15a6fec8-b852-4ad7-96ee-d69af750e84d-node-bootstrap-token\") pod \"machine-config-server-h9vhd\" (UID: \"15a6fec8-b852-4ad7-96ee-d69af750e84d\") " pod="openshift-machine-config-operator/machine-config-server-h9vhd"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.140903 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5711cd35-18b6-4bbb-80a2-485ab5dbdda4-config\") pod \"kube-apiserver-operator-766d6c64bb-8xvkk\" (UID: \"5711cd35-18b6-4bbb-80a2-485ab5dbdda4\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8xvkk"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.140937 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/7b4e0874-30a8-4393-9864-cce370b40d8a-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-jfpkc\" (UID: \"7b4e0874-30a8-4393-9864-cce370b40d8a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-jfpkc"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.141039 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed81cda7-7d47-476a-8a51-029a15af8417-config\") pod \"service-ca-operator-777779d784-l76sw\" (UID: \"ed81cda7-7d47-476a-8a51-029a15af8417\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-l76sw"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.141061 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/15a6fec8-b852-4ad7-96ee-d69af750e84d-certs\") pod \"machine-config-server-h9vhd\" (UID: \"15a6fec8-b852-4ad7-96ee-d69af750e84d\") " pod="openshift-machine-config-operator/machine-config-server-h9vhd"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.142147 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5711cd35-18b6-4bbb-80a2-485ab5dbdda4-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-8xvkk\" (UID: \"5711cd35-18b6-4bbb-80a2-485ab5dbdda4\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8xvkk"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.142201 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/987304c9-45fa-40ab-a687-528d1e8f69d3-config\") pod \"route-controller-manager-6576b87f9c-748cf\" (UID: \"987304c9-45fa-40ab-a687-528d1e8f69d3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-748cf"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.142288 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1131acfa-a9db-4b5b-9f84-77f92597f69c-config-volume\") pod \"dns-default-htktd\" (UID: \"1131acfa-a9db-4b5b-9f84-77f92597f69c\") " pod="openshift-dns/dns-default-htktd"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.142657 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f4facedc-7187-46c6-b930-27c347e61b05-apiservice-cert\") pod \"packageserver-d55dfcdfc-v447t\" (UID: \"f4facedc-7187-46c6-b930-27c347e61b05\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-v447t"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.142690 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/07cf9cc6-a9ee-462a-811b-51051d221a8a-signing-cabundle\") pod \"service-ca-9c57cc56f-2wrb6\" (UID: \"07cf9cc6-a9ee-462a-811b-51051d221a8a\") " pod="openshift-service-ca/service-ca-9c57cc56f-2wrb6"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.142708 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5711cd35-18b6-4bbb-80a2-485ab5dbdda4-config\") pod \"kube-apiserver-operator-766d6c64bb-8xvkk\" (UID: \"5711cd35-18b6-4bbb-80a2-485ab5dbdda4\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8xvkk"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.143549 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d8be75c-bac6-4a91-b9b9-19cf893ad193-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-j5xtp\" (UID: \"1d8be75c-bac6-4a91-b9b9-19cf893ad193\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j5xtp"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.143770 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/987304c9-45fa-40ab-a687-528d1e8f69d3-config\") pod \"route-controller-manager-6576b87f9c-748cf\" (UID: \"987304c9-45fa-40ab-a687-528d1e8f69d3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-748cf"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.143848 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/b81e6fc2-4131-40e0-994a-73435d2a4cbe-mountpoint-dir\") pod \"csi-hostpathplugin-btjfc\" (UID: \"b81e6fc2-4131-40e0-994a-73435d2a4cbe\") " pod="hostpath-provisioner/csi-hostpathplugin-btjfc"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.143890 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/b81e6fc2-4131-40e0-994a-73435d2a4cbe-plugins-dir\") pod \"csi-hostpathplugin-btjfc\" (UID: \"b81e6fc2-4131-40e0-994a-73435d2a4cbe\") " pod="hostpath-provisioner/csi-hostpathplugin-btjfc"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.143980 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c35d1e2c-9135-4bff-a0af-cd20addc6134-ca-trust-extracted\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.144134 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d8be75c-bac6-4a91-b9b9-19cf893ad193-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-j5xtp\" (UID: \"1d8be75c-bac6-4a91-b9b9-19cf893ad193\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j5xtp"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.144262 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c35d1e2c-9135-4bff-a0af-cd20addc6134-registry-tls\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.144326 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c35d1e2c-9135-4bff-a0af-cd20addc6134-bound-sa-token\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.144358 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5sphg\" (UniqueName: \"kubernetes.io/projected/f4facedc-7187-46c6-b930-27c347e61b05-kube-api-access-5sphg\") pod \"packageserver-d55dfcdfc-v447t\" (UID: \"f4facedc-7187-46c6-b930-27c347e61b05\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-v447t"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.144380 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c35d1e2c-9135-4bff-a0af-cd20addc6134-ca-trust-extracted\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.144412 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1d8be75c-bac6-4a91-b9b9-19cf893ad193-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-j5xtp\" (UID: \"1d8be75c-bac6-4a91-b9b9-19cf893ad193\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j5xtp"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.144995 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/987304c9-45fa-40ab-a687-528d1e8f69d3-serving-cert\") pod \"route-controller-manager-6576b87f9c-748cf\" (UID: \"987304c9-45fa-40ab-a687-528d1e8f69d3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-748cf"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.145047 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/b81e6fc2-4131-40e0-994a-73435d2a4cbe-registration-dir\") pod \"csi-hostpathplugin-btjfc\" (UID: \"b81e6fc2-4131-40e0-994a-73435d2a4cbe\") " pod="hostpath-provisioner/csi-hostpathplugin-btjfc"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.145157 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5711cd35-18b6-4bbb-80a2-485ab5dbdda4-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-8xvkk\" (UID: \"5711cd35-18b6-4bbb-80a2-485ab5dbdda4\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8xvkk"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.145400 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f4facedc-7187-46c6-b930-27c347e61b05-apiservice-cert\") pod \"packageserver-d55dfcdfc-v447t\" (UID: \"f4facedc-7187-46c6-b930-27c347e61b05\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-v447t"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.145451 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c35d1e2c-9135-4bff-a0af-cd20addc6134-installation-pull-secrets\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.145577 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mkbsf"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.145688 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvq6f\" (UniqueName: \"kubernetes.io/projected/7b4e0874-30a8-4393-9864-cce370b40d8a-kube-api-access-jvq6f\") pod \"multus-admission-controller-857f4d67dd-jfpkc\" (UID: \"7b4e0874-30a8-4393-9864-cce370b40d8a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-jfpkc"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.147406 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f4facedc-7187-46c6-b930-27c347e61b05-webhook-cert\") pod \"packageserver-d55dfcdfc-v447t\" (UID: \"f4facedc-7187-46c6-b930-27c347e61b05\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-v447t"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.147755 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c35d1e2c-9135-4bff-a0af-cd20addc6134-registry-tls\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.148753 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1d8be75c-bac6-4a91-b9b9-19cf893ad193-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-j5xtp\" (UID: \"1d8be75c-bac6-4a91-b9b9-19cf893ad193\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j5xtp"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.152321 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-5tffj"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.159334 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-w7xxc"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.172767 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q6ks9\" (UniqueName: \"kubernetes.io/projected/c35d1e2c-9135-4bff-a0af-cd20addc6134-kube-api-access-q6ks9\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.182302 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvz4w\" (UniqueName: \"kubernetes.io/projected/1d8be75c-bac6-4a91-b9b9-19cf893ad193-kube-api-access-bvz4w\") pod \"kube-storage-version-migrator-operator-b67b599dd-j5xtp\" (UID: \"1d8be75c-bac6-4a91-b9b9-19cf893ad193\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j5xtp"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.217629 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xzjb\" (UniqueName: \"kubernetes.io/projected/987304c9-45fa-40ab-a687-528d1e8f69d3-kube-api-access-4xzjb\") pod \"route-controller-manager-6576b87f9c-748cf\" (UID: \"987304c9-45fa-40ab-a687-528d1e8f69d3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-748cf"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.222880 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-c2mlc"]
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.235650 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c35d1e2c-9135-4bff-a0af-cd20addc6134-bound-sa-token\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247179 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/b81e6fc2-4131-40e0-994a-73435d2a4cbe-mountpoint-dir\") pod \"csi-hostpathplugin-btjfc\" (UID: \"b81e6fc2-4131-40e0-994a-73435d2a4cbe\") " pod="hostpath-provisioner/csi-hostpathplugin-btjfc"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247208 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/b81e6fc2-4131-40e0-994a-73435d2a4cbe-plugins-dir\") pod \"csi-hostpathplugin-btjfc\" (UID: \"b81e6fc2-4131-40e0-994a-73435d2a4cbe\") " pod="hostpath-provisioner/csi-hostpathplugin-btjfc"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247234 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/b81e6fc2-4131-40e0-994a-73435d2a4cbe-registration-dir\") pod \"csi-hostpathplugin-btjfc\" (UID: \"b81e6fc2-4131-40e0-994a-73435d2a4cbe\") " pod="hostpath-provisioner/csi-hostpathplugin-btjfc"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247264 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvq6f\" (UniqueName: \"kubernetes.io/projected/7b4e0874-30a8-4393-9864-cce370b40d8a-kube-api-access-jvq6f\") pod \"multus-admission-controller-857f4d67dd-jfpkc\" (UID: \"7b4e0874-30a8-4393-9864-cce370b40d8a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-jfpkc"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247292 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3764ba0a-05b2-442a-96ef-0534f4a1aca0-profile-collector-cert\") pod \"catalog-operator-68c6474976-qwdnv\" (UID: \"3764ba0a-05b2-442a-96ef-0534f4a1aca0\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qwdnv"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247316 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-449gp\" (UniqueName: \"kubernetes.io/projected/3764ba0a-05b2-442a-96ef-0534f4a1aca0-kube-api-access-449gp\") pod \"catalog-operator-68c6474976-qwdnv\" (UID: \"3764ba0a-05b2-442a-96ef-0534f4a1aca0\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qwdnv"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247334 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/00c9d661-6c2e-48e7-9747-1476d52290a8-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-t2lvz\" (UID: \"00c9d661-6c2e-48e7-9747-1476d52290a8\") " pod="openshift-marketplace/marketplace-operator-79b997595-t2lvz"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247350 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7cnsl\" (UniqueName: \"kubernetes.io/projected/00c9d661-6c2e-48e7-9747-1476d52290a8-kube-api-access-7cnsl\") pod \"marketplace-operator-79b997595-t2lvz\" (UID: \"00c9d661-6c2e-48e7-9747-1476d52290a8\") " pod="openshift-marketplace/marketplace-operator-79b997595-t2lvz"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247370 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnzxp\" (UniqueName: \"kubernetes.io/projected/915701ce-919a-4743-b390-fa72105516e1-kube-api-access-xnzxp\") pod \"control-plane-machine-set-operator-78cbb6b69f-p5qw9\" (UID: \"915701ce-919a-4743-b390-fa72105516e1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-p5qw9"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247387 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/b81e6fc2-4131-40e0-994a-73435d2a4cbe-csi-data-dir\") pod \"csi-hostpathplugin-btjfc\" (UID: \"b81e6fc2-4131-40e0-994a-73435d2a4cbe\") " pod="hostpath-provisioner/csi-hostpathplugin-btjfc"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247403 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9fp7f\" (UniqueName: \"kubernetes.io/projected/ed81cda7-7d47-476a-8a51-029a15af8417-kube-api-access-9fp7f\") pod \"service-ca-operator-777779d784-l76sw\" (UID: \"ed81cda7-7d47-476a-8a51-029a15af8417\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-l76sw"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247420 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/915701ce-919a-4743-b390-fa72105516e1-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-p5qw9\" (UID: \"915701ce-919a-4743-b390-fa72105516e1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-p5qw9"
\"915701ce-919a-4743-b390-fa72105516e1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-p5qw9" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247442 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247461 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/07cf9cc6-a9ee-462a-811b-51051d221a8a-signing-key\") pod \"service-ca-9c57cc56f-2wrb6\" (UID: \"07cf9cc6-a9ee-462a-811b-51051d221a8a\") " pod="openshift-service-ca/service-ca-9c57cc56f-2wrb6" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247481 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3764ba0a-05b2-442a-96ef-0534f4a1aca0-srv-cert\") pod \"catalog-operator-68c6474976-qwdnv\" (UID: \"3764ba0a-05b2-442a-96ef-0534f4a1aca0\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qwdnv" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247499 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ed81cda7-7d47-476a-8a51-029a15af8417-serving-cert\") pod \"service-ca-operator-777779d784-l76sw\" (UID: \"ed81cda7-7d47-476a-8a51-029a15af8417\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-l76sw" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247514 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1131acfa-a9db-4b5b-9f84-77f92597f69c-metrics-tls\") pod \"dns-default-htktd\" (UID: \"1131acfa-a9db-4b5b-9f84-77f92597f69c\") " pod="openshift-dns/dns-default-htktd" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247530 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdbtt\" (UniqueName: \"kubernetes.io/projected/07cf9cc6-a9ee-462a-811b-51051d221a8a-kube-api-access-jdbtt\") pod \"service-ca-9c57cc56f-2wrb6\" (UID: \"07cf9cc6-a9ee-462a-811b-51051d221a8a\") " pod="openshift-service-ca/service-ca-9c57cc56f-2wrb6" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247551 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qx47d\" (UniqueName: \"kubernetes.io/projected/1131acfa-a9db-4b5b-9f84-77f92597f69c-kube-api-access-qx47d\") pod \"dns-default-htktd\" (UID: \"1131acfa-a9db-4b5b-9f84-77f92597f69c\") " pod="openshift-dns/dns-default-htktd" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247577 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/7d731670-5f15-4d66-92bc-1e4d71645b91-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-dwg5b\" (UID: \"7d731670-5f15-4d66-92bc-1e4d71645b91\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dwg5b" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247594 4708 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/00c9d661-6c2e-48e7-9747-1476d52290a8-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-t2lvz\" (UID: \"00c9d661-6c2e-48e7-9747-1476d52290a8\") " pod="openshift-marketplace/marketplace-operator-79b997595-t2lvz" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247611 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dtqx\" (UniqueName: \"kubernetes.io/projected/7d731670-5f15-4d66-92bc-1e4d71645b91-kube-api-access-7dtqx\") pod \"package-server-manager-789f6589d5-dwg5b\" (UID: \"7d731670-5f15-4d66-92bc-1e4d71645b91\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dwg5b" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247611 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/b81e6fc2-4131-40e0-994a-73435d2a4cbe-mountpoint-dir\") pod \"csi-hostpathplugin-btjfc\" (UID: \"b81e6fc2-4131-40e0-994a-73435d2a4cbe\") " pod="hostpath-provisioner/csi-hostpathplugin-btjfc" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247635 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k57mr\" (UniqueName: \"kubernetes.io/projected/15a6fec8-b852-4ad7-96ee-d69af750e84d-kube-api-access-k57mr\") pod \"machine-config-server-h9vhd\" (UID: \"15a6fec8-b852-4ad7-96ee-d69af750e84d\") " pod="openshift-machine-config-operator/machine-config-server-h9vhd" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247686 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/b81e6fc2-4131-40e0-994a-73435d2a4cbe-socket-dir\") pod \"csi-hostpathplugin-btjfc\" (UID: \"b81e6fc2-4131-40e0-994a-73435d2a4cbe\") " pod="hostpath-provisioner/csi-hostpathplugin-btjfc" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247738 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7h2nd\" (UniqueName: \"kubernetes.io/projected/b81e6fc2-4131-40e0-994a-73435d2a4cbe-kube-api-access-7h2nd\") pod \"csi-hostpathplugin-btjfc\" (UID: \"b81e6fc2-4131-40e0-994a-73435d2a4cbe\") " pod="hostpath-provisioner/csi-hostpathplugin-btjfc" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247765 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/15a6fec8-b852-4ad7-96ee-d69af750e84d-node-bootstrap-token\") pod \"machine-config-server-h9vhd\" (UID: \"15a6fec8-b852-4ad7-96ee-d69af750e84d\") " pod="openshift-machine-config-operator/machine-config-server-h9vhd" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247837 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/7b4e0874-30a8-4393-9864-cce370b40d8a-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-jfpkc\" (UID: \"7b4e0874-30a8-4393-9864-cce370b40d8a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-jfpkc" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247869 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed81cda7-7d47-476a-8a51-029a15af8417-config\") pod \"service-ca-operator-777779d784-l76sw\" (UID: \"ed81cda7-7d47-476a-8a51-029a15af8417\") " 
pod="openshift-service-ca-operator/service-ca-operator-777779d784-l76sw" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247897 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/15a6fec8-b852-4ad7-96ee-d69af750e84d-certs\") pod \"machine-config-server-h9vhd\" (UID: \"15a6fec8-b852-4ad7-96ee-d69af750e84d\") " pod="openshift-machine-config-operator/machine-config-server-h9vhd" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247932 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1131acfa-a9db-4b5b-9f84-77f92597f69c-config-volume\") pod \"dns-default-htktd\" (UID: \"1131acfa-a9db-4b5b-9f84-77f92597f69c\") " pod="openshift-dns/dns-default-htktd" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.247960 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/07cf9cc6-a9ee-462a-811b-51051d221a8a-signing-cabundle\") pod \"service-ca-9c57cc56f-2wrb6\" (UID: \"07cf9cc6-a9ee-462a-811b-51051d221a8a\") " pod="openshift-service-ca/service-ca-9c57cc56f-2wrb6" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.250086 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/b81e6fc2-4131-40e0-994a-73435d2a4cbe-plugins-dir\") pod \"csi-hostpathplugin-btjfc\" (UID: \"b81e6fc2-4131-40e0-994a-73435d2a4cbe\") " pod="hostpath-provisioner/csi-hostpathplugin-btjfc" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.250151 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/07cf9cc6-a9ee-462a-811b-51051d221a8a-signing-cabundle\") pod \"service-ca-9c57cc56f-2wrb6\" (UID: \"07cf9cc6-a9ee-462a-811b-51051d221a8a\") " pod="openshift-service-ca/service-ca-9c57cc56f-2wrb6" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.252968 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/b81e6fc2-4131-40e0-994a-73435d2a4cbe-registration-dir\") pod \"csi-hostpathplugin-btjfc\" (UID: \"b81e6fc2-4131-40e0-994a-73435d2a4cbe\") " pod="hostpath-provisioner/csi-hostpathplugin-btjfc" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.253767 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3764ba0a-05b2-442a-96ef-0534f4a1aca0-profile-collector-cert\") pod \"catalog-operator-68c6474976-qwdnv\" (UID: \"3764ba0a-05b2-442a-96ef-0534f4a1aca0\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qwdnv" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.253859 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/b81e6fc2-4131-40e0-994a-73435d2a4cbe-socket-dir\") pod \"csi-hostpathplugin-btjfc\" (UID: \"b81e6fc2-4131-40e0-994a-73435d2a4cbe\") " pod="hostpath-provisioner/csi-hostpathplugin-btjfc" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.254476 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/00c9d661-6c2e-48e7-9747-1476d52290a8-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-t2lvz\" (UID: \"00c9d661-6c2e-48e7-9747-1476d52290a8\") 
" pod="openshift-marketplace/marketplace-operator-79b997595-t2lvz" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.254637 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/b81e6fc2-4131-40e0-994a-73435d2a4cbe-csi-data-dir\") pod \"csi-hostpathplugin-btjfc\" (UID: \"b81e6fc2-4131-40e0-994a-73435d2a4cbe\") " pod="hostpath-provisioner/csi-hostpathplugin-btjfc" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.255894 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed81cda7-7d47-476a-8a51-029a15af8417-config\") pod \"service-ca-operator-777779d784-l76sw\" (UID: \"ed81cda7-7d47-476a-8a51-029a15af8417\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-l76sw" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.256188 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ed81cda7-7d47-476a-8a51-029a15af8417-serving-cert\") pod \"service-ca-operator-777779d784-l76sw\" (UID: \"ed81cda7-7d47-476a-8a51-029a15af8417\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-l76sw" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.256488 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1131acfa-a9db-4b5b-9f84-77f92597f69c-metrics-tls\") pod \"dns-default-htktd\" (UID: \"1131acfa-a9db-4b5b-9f84-77f92597f69c\") " pod="openshift-dns/dns-default-htktd" Feb 03 07:12:45 crc kubenswrapper[4708]: E0203 07:12:45.257521 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:45.75750033 +0000 UTC m=+144.739447187 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.257718 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1131acfa-a9db-4b5b-9f84-77f92597f69c-config-volume\") pod \"dns-default-htktd\" (UID: \"1131acfa-a9db-4b5b-9f84-77f92597f69c\") " pod="openshift-dns/dns-default-htktd" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.260200 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/7b4e0874-30a8-4393-9864-cce370b40d8a-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-jfpkc\" (UID: \"7b4e0874-30a8-4393-9864-cce370b40d8a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-jfpkc" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.260719 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/7d731670-5f15-4d66-92bc-1e4d71645b91-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-dwg5b\" (UID: \"7d731670-5f15-4d66-92bc-1e4d71645b91\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dwg5b" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.261890 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/15a6fec8-b852-4ad7-96ee-d69af750e84d-node-bootstrap-token\") pod \"machine-config-server-h9vhd\" (UID: \"15a6fec8-b852-4ad7-96ee-d69af750e84d\") " pod="openshift-machine-config-operator/machine-config-server-h9vhd" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.262477 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3764ba0a-05b2-442a-96ef-0534f4a1aca0-srv-cert\") pod \"catalog-operator-68c6474976-qwdnv\" (UID: \"3764ba0a-05b2-442a-96ef-0534f4a1aca0\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qwdnv" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.274589 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/915701ce-919a-4743-b390-fa72105516e1-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-p5qw9\" (UID: \"915701ce-919a-4743-b390-fa72105516e1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-p5qw9" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.279510 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-gldbt"] Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.279844 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5sphg\" (UniqueName: \"kubernetes.io/projected/f4facedc-7187-46c6-b930-27c347e61b05-kube-api-access-5sphg\") pod \"packageserver-d55dfcdfc-v447t\" (UID: \"f4facedc-7187-46c6-b930-27c347e61b05\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-v447t" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.281177 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/15a6fec8-b852-4ad7-96ee-d69af750e84d-certs\") pod \"machine-config-server-h9vhd\" (UID: \"15a6fec8-b852-4ad7-96ee-d69af750e84d\") " pod="openshift-machine-config-operator/machine-config-server-h9vhd" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.282184 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/00c9d661-6c2e-48e7-9747-1476d52290a8-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-t2lvz\" (UID: \"00c9d661-6c2e-48e7-9747-1476d52290a8\") " pod="openshift-marketplace/marketplace-operator-79b997595-t2lvz" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.283487 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/07cf9cc6-a9ee-462a-811b-51051d221a8a-signing-key\") pod \"service-ca-9c57cc56f-2wrb6\" (UID: \"07cf9cc6-a9ee-462a-811b-51051d221a8a\") " pod="openshift-service-ca/service-ca-9c57cc56f-2wrb6" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.297327 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5711cd35-18b6-4bbb-80a2-485ab5dbdda4-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-8xvkk\" (UID: \"5711cd35-18b6-4bbb-80a2-485ab5dbdda4\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8xvkk" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.304718 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-mts5h"] Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.306835 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2"] Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.316412 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvq6f\" (UniqueName: \"kubernetes.io/projected/7b4e0874-30a8-4393-9864-cce370b40d8a-kube-api-access-jvq6f\") pod \"multus-admission-controller-857f4d67dd-jfpkc\" (UID: \"7b4e0874-30a8-4393-9864-cce370b40d8a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-jfpkc" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.346163 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k57mr\" (UniqueName: \"kubernetes.io/projected/15a6fec8-b852-4ad7-96ee-d69af750e84d-kube-api-access-k57mr\") pod \"machine-config-server-h9vhd\" (UID: \"15a6fec8-b852-4ad7-96ee-d69af750e84d\") " pod="openshift-machine-config-operator/machine-config-server-h9vhd" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.348056 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-748cf" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.348642 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:45 crc kubenswrapper[4708]: E0203 07:12:45.352969 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:45.852916826 +0000 UTC m=+144.834863633 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.353057 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:45 crc kubenswrapper[4708]: E0203 07:12:45.354013 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:45.854001023 +0000 UTC m=+144.835947830 (durationBeforeRetry 500ms). 
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.359874 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-449gp\" (UniqueName: \"kubernetes.io/projected/3764ba0a-05b2-442a-96ef-0534f4a1aca0-kube-api-access-449gp\") pod \"catalog-operator-68c6474976-qwdnv\" (UID: \"3764ba0a-05b2-442a-96ef-0534f4a1aca0\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qwdnv"
Feb 03 07:12:45 crc kubenswrapper[4708]: W0203 07:12:45.367223 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podabf360a5_e982_4b3e_a814_511d57e9073f.slice/crio-de59c3d1e1b9d416e5c463cfa680b2a05ee5dbb4cef2abdebbd76f86ed58b9e0 WatchSource:0}: Error finding container de59c3d1e1b9d416e5c463cfa680b2a05ee5dbb4cef2abdebbd76f86ed58b9e0: Status 404 returned error can't find the container with id de59c3d1e1b9d416e5c463cfa680b2a05ee5dbb4cef2abdebbd76f86ed58b9e0
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.368053 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mwrf4"]
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.369230 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8xvkk"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.371423 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vtsww"]
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.378373 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcrbn"]
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.390757 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ghh7f"]
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.405988 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qx47d\" (UniqueName: \"kubernetes.io/projected/1131acfa-a9db-4b5b-9f84-77f92597f69c-kube-api-access-qx47d\") pod \"dns-default-htktd\" (UID: \"1131acfa-a9db-4b5b-9f84-77f92597f69c\") " pod="openshift-dns/dns-default-htktd"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.414726 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdbtt\" (UniqueName: \"kubernetes.io/projected/07cf9cc6-a9ee-462a-811b-51051d221a8a-kube-api-access-jdbtt\") pod \"service-ca-9c57cc56f-2wrb6\" (UID: \"07cf9cc6-a9ee-462a-811b-51051d221a8a\") " pod="openshift-service-ca/service-ca-9c57cc56f-2wrb6"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.419198 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dtqx\" (UniqueName: \"kubernetes.io/projected/7d731670-5f15-4d66-92bc-1e4d71645b91-kube-api-access-7dtqx\") pod \"package-server-manager-789f6589d5-dwg5b\" (UID: \"7d731670-5f15-4d66-92bc-1e4d71645b91\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dwg5b"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.426428 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-b9npd"]
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.431513 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-v447t"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.436534 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7h2nd\" (UniqueName: \"kubernetes.io/projected/b81e6fc2-4131-40e0-994a-73435d2a4cbe-kube-api-access-7h2nd\") pod \"csi-hostpathplugin-btjfc\" (UID: \"b81e6fc2-4131-40e0-994a-73435d2a4cbe\") " pod="hostpath-provisioner/csi-hostpathplugin-btjfc"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.438292 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j5xtp"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.453974 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 07:12:45 crc kubenswrapper[4708]: E0203 07:12:45.454465 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:45.954442816 +0000 UTC m=+144.936389623 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.460372 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7cnsl\" (UniqueName: \"kubernetes.io/projected/00c9d661-6c2e-48e7-9747-1476d52290a8-kube-api-access-7cnsl\") pod \"marketplace-operator-79b997595-t2lvz\" (UID: \"00c9d661-6c2e-48e7-9747-1476d52290a8\") " pod="openshift-marketplace/marketplace-operator-79b997595-t2lvz"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.473476 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dwg5b"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.477090 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnzxp\" (UniqueName: \"kubernetes.io/projected/915701ce-919a-4743-b390-fa72105516e1-kube-api-access-xnzxp\") pod \"control-plane-machine-set-operator-78cbb6b69f-p5qw9\" (UID: \"915701ce-919a-4743-b390-fa72105516e1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-p5qw9"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.481724 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-2wrb6"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.496408 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-t2lvz"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.499553 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9fp7f\" (UniqueName: \"kubernetes.io/projected/ed81cda7-7d47-476a-8a51-029a15af8417-kube-api-access-9fp7f\") pod \"service-ca-operator-777779d784-l76sw\" (UID: \"ed81cda7-7d47-476a-8a51-029a15af8417\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-l76sw"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.503510 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-l76sw"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.511047 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qwdnv"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.520574 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-htktd"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.550136 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-h9vhd"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.550556 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-btjfc"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.555111 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk"
Feb 03 07:12:45 crc kubenswrapper[4708]: E0203 07:12:45.556972 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:46.05694546 +0000 UTC m=+145.038892267 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.580355 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-jfpkc"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.603868 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-mq55f"]
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.660023 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 07:12:45 crc kubenswrapper[4708]: E0203 07:12:45.660483 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:46.16046406 +0000 UTC m=+145.142410867 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 07:12:45 crc kubenswrapper[4708]: W0203 07:12:45.688646 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod15a6fec8_b852_4ad7_96ee_d69af750e84d.slice/crio-708f345072273623d7e06858a9f8a7d3a7dde7a50011f0fb31387193b693c9ad WatchSource:0}: Error finding container 708f345072273623d7e06858a9f8a7d3a7dde7a50011f0fb31387193b693c9ad: Status 404 returned error can't find the container with id 708f345072273623d7e06858a9f8a7d3a7dde7a50011f0fb31387193b693c9ad
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.761371 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk"
Feb 03 07:12:45 crc kubenswrapper[4708]: E0203 07:12:45.761765 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:46.261751843 +0000 UTC m=+145.243698650 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.765839 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-p5qw9"
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.864714 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 07:12:45 crc kubenswrapper[4708]: E0203 07:12:45.864948 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:46.364916104 +0000 UTC m=+145.346862911 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.865135 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk"
Feb 03 07:12:45 crc kubenswrapper[4708]: E0203 07:12:45.865627 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:46.365611681 +0000 UTC m=+145.347558578 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.910526 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-gldbt" event={"ID":"2ad9206f-443a-4f4b-820f-f017581bb341","Type":"ContainerStarted","Data":"844f14c1dcc91a20c4fe1238c7a9ddb84c158db95713b94fae958ac66dab69c4"}
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.913847 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-mq55f" event={"ID":"d24b4086-5d15-4100-b580-9d9b69aa7602","Type":"ContainerStarted","Data":"bf4d0e811522a765495c2afaa9ddb30e56c2ac0ef4e5eb944d1a63cc121d5263"}
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.961506 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-zrxh2" event={"ID":"f8f529d7-932a-4047-b603-f84e03fe6898","Type":"ContainerStarted","Data":"fd0ee474fc6296ca46faac3e1c1e3fc7410f66d6c230852291cfb680c1d7ee4a"}
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.964889 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcrbn" event={"ID":"cb71375d-01dd-442b-ac48-a7f26ccde85d","Type":"ContainerStarted","Data":"e6049c15c9bd0915fd7aac093aa1f5914163c394cbfa1a55479d5537d854bba7"}
Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.966298 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 03 07:12:45 crc kubenswrapper[4708]: E0203 07:12:45.966676 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:46.4666613 +0000 UTC m=+145.448608107 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.966805 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-j5cvd" event={"ID":"0dfe66dc-684e-49ba-932e-11e3d0eff5b0","Type":"ContainerStarted","Data":"af1000fb2320d00eb4f67fa321b30c4f12f0a285232097291fa23d0521517221"} Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.967447 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-j5cvd" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.968698 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b9npd" event={"ID":"7ec92dcf-aef5-49fd-9d97-ccf3c79decd8","Type":"ContainerStarted","Data":"3736b661b98d9360456409aff78a2661e1a9cbe7d3f6bb03beb0a630bf879b93"} Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.968931 4708 patch_prober.go:28] interesting pod/console-operator-58897d9998-j5cvd container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.13:8443/readyz\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body= Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.968965 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-j5cvd" podUID="0dfe66dc-684e-49ba-932e-11e3d0eff5b0" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.13:8443/readyz\": dial tcp 10.217.0.13:8443: connect: connection refused" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.970568 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vtsww" event={"ID":"a1054514-9df0-4244-938c-9c6430b8183b","Type":"ContainerStarted","Data":"12360beb07c3a68451f6609c4706227e6a2e6210a799dd89fa777b2a81597c67"} Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.973068 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-ds4j6" event={"ID":"b20ffaa1-a31e-451e-89a3-5e36287a0c5b","Type":"ContainerStarted","Data":"314d72ceb99323cb9a27af5f8a2e09f3a0432c7e48d65e440b6f44489a166dd7"} Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.973126 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-ds4j6" event={"ID":"b20ffaa1-a31e-451e-89a3-5e36287a0c5b","Type":"ContainerStarted","Data":"e37d66047ddb5c73203bac3742e3dbf0c35643f872ab2985c4fbffd385c2711d"} Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.974249 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-mts5h" event={"ID":"abf360a5-e982-4b3e-a814-511d57e9073f","Type":"ContainerStarted","Data":"de59c3d1e1b9d416e5c463cfa680b2a05ee5dbb4cef2abdebbd76f86ed58b9e0"} Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.976835 4708 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vzmxn" event={"ID":"544ec176-92f4-4374-ae80-2ed8717172d1","Type":"ContainerStarted","Data":"2c38e0b20168da40a758b3fc7951213c921bcf1845567664f5d86fbdbe6da770"} Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.976861 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vzmxn" event={"ID":"544ec176-92f4-4374-ae80-2ed8717172d1","Type":"ContainerStarted","Data":"378917f2bf0ab2d0e3524003a5e233f583716697e072569bbe2ed42325861819"} Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.977921 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mwrf4" event={"ID":"3a0d22e9-2f9a-4a91-85b8-7ad55bff4f46","Type":"ContainerStarted","Data":"dc83ed1385e861698b3ff014981254e530bcfec317e357ce333f716f7d4b91ee"} Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.978639 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ghh7f" event={"ID":"69843c19-ead2-4248-a016-c29c58ddddc3","Type":"ContainerStarted","Data":"06eb1d4a7df8f33dbe000118999e9f5bc5dd198b4aff72f43385a30b823f92ee"} Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.979530 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-c2mlc" event={"ID":"7ee20271-e2ce-4476-a011-5e00e19126bf","Type":"ContainerStarted","Data":"a0789132ce68e4fc5321d81b744a1136e05b8e7c9e2cd32b170c49227e3f560e"} Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.980616 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2" event={"ID":"3bb818a6-b7dd-4e6a-b767-394bca081222","Type":"ContainerStarted","Data":"47d426cb723e4fbac8d702e4b3bb28a72a0901732d05153f88fa64aa84ad6cad"} Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.982049 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-xpdpr" event={"ID":"d1edd916-a3bf-4331-abba-d5c8753d4377","Type":"ContainerStarted","Data":"4dc81aa73a83defe1494b090180b53285511a377b2901f51af1aac14b02ca8aa"} Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.982089 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-xpdpr" event={"ID":"d1edd916-a3bf-4331-abba-d5c8753d4377","Type":"ContainerStarted","Data":"49a55fc13bcb2a8944212ccb5cf9a50ce1790e5b307de5ff5933e629d8559486"} Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.982225 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-xpdpr" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.983647 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-zd8kn" event={"ID":"46ee42c1-592d-47c3-85ba-ead60edf7aca","Type":"ContainerStarted","Data":"cd92d92f054982bdf31a98c86bd7fe25b74e1231a5ec30c339dd5d26e7132711"} Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.986270 4708 generic.go:334] "Generic (PLEG): container finished" podID="c4d3118a-28d4-403b-95c3-f2a11c14846d" containerID="3e149e46b83d95769cb3330d46abd05dca7df7bbb804e7c1a5761f468923e1c3" exitCode=0 Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.986324 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-apiserver/apiserver-76f77b778f-qftz5" event={"ID":"c4d3118a-28d4-403b-95c3-f2a11c14846d","Type":"ContainerDied","Data":"3e149e46b83d95769cb3330d46abd05dca7df7bbb804e7c1a5761f468923e1c3"} Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.986343 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-qftz5" event={"ID":"c4d3118a-28d4-403b-95c3-f2a11c14846d","Type":"ContainerStarted","Data":"cefaf6f40a1d62cdd52023f9d588ebfa76ea03669d2cb56c3a3ddd1a7784840f"} Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.989085 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-9fjv2" event={"ID":"a34c5792-5895-4d08-9e7e-b3948f5be096","Type":"ContainerStarted","Data":"29c2a45c2fbfc8b7e9d47e1dcd3c1f1e7a38ecb54a45463c47bd34512b1fcafe"} Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.989111 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-9fjv2" event={"ID":"a34c5792-5895-4d08-9e7e-b3948f5be096","Type":"ContainerStarted","Data":"3bfc220e4c6149c2c353ae3c741ddf9cc75aba874625ba2160d105b6b5772904"} Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.990668 4708 generic.go:334] "Generic (PLEG): container finished" podID="283462c1-8f31-40aa-b570-96fff19ff3d0" containerID="8076c215ab6b07fe8d82d76bd579281b78e62e5393dd495cf3754937654ed3a8" exitCode=0 Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.990758 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-tdn7n" event={"ID":"283462c1-8f31-40aa-b570-96fff19ff3d0","Type":"ContainerDied","Data":"8076c215ab6b07fe8d82d76bd579281b78e62e5393dd495cf3754937654ed3a8"} Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.990778 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-tdn7n" event={"ID":"283462c1-8f31-40aa-b570-96fff19ff3d0","Type":"ContainerStarted","Data":"701145c84ef168597e9ab0a62c90ce080c2a901a7a099b5224a0657e8e952979"} Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.992563 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" event={"ID":"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6","Type":"ContainerStarted","Data":"a913441fe669bd7247f09f337c749ffb62e2471802d62011a6482594d98ae3ac"} Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.994872 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:45 crc kubenswrapper[4708]: I0203 07:12:45.996111 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-h9vhd" event={"ID":"15a6fec8-b852-4ad7-96ee-d69af750e84d","Type":"ContainerStarted","Data":"708f345072273623d7e06858a9f8a7d3a7dde7a50011f0fb31387193b693c9ad"} Feb 03 07:12:46 crc kubenswrapper[4708]: I0203 07:12:46.060033 4708 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-xpdpr container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Feb 03 07:12:46 crc kubenswrapper[4708]: I0203 07:12:46.060089 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-xpdpr" 
podUID="d1edd916-a3bf-4331-abba-d5c8753d4377" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Feb 03 07:12:46 crc kubenswrapper[4708]: I0203 07:12:46.070722 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:46 crc kubenswrapper[4708]: E0203 07:12:46.078866 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:46.578852747 +0000 UTC m=+145.560799554 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:46 crc kubenswrapper[4708]: I0203 07:12:46.176187 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:46 crc kubenswrapper[4708]: E0203 07:12:46.176715 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:46.676695284 +0000 UTC m=+145.658642091 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:46 crc kubenswrapper[4708]: I0203 07:12:46.277473 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:46 crc kubenswrapper[4708]: E0203 07:12:46.278303 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:46.778277735 +0000 UTC m=+145.760224712 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:46 crc kubenswrapper[4708]: I0203 07:12:46.320735 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-5c2p2"] Feb 03 07:12:46 crc kubenswrapper[4708]: I0203 07:12:46.339586 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8xpd7"] Feb 03 07:12:46 crc kubenswrapper[4708]: I0203 07:12:46.341107 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501700-w7xxc"] Feb 03 07:12:46 crc kubenswrapper[4708]: I0203 07:12:46.342638 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-5tffj"] Feb 03 07:12:46 crc kubenswrapper[4708]: I0203 07:12:46.344543 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xcbl9"] Feb 03 07:12:46 crc kubenswrapper[4708]: I0203 07:12:46.346169 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mkbsf"] Feb 03 07:12:46 crc kubenswrapper[4708]: I0203 07:12:46.347712 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-xvf6j"] Feb 03 07:12:46 crc kubenswrapper[4708]: I0203 07:12:46.379118 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:46 crc kubenswrapper[4708]: E0203 07:12:46.379362 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:46.879330462 +0000 UTC m=+145.861277269 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:46 crc kubenswrapper[4708]: I0203 07:12:46.379549 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:46 crc kubenswrapper[4708]: E0203 07:12:46.379930 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:46.879919348 +0000 UTC m=+145.861866225 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:46 crc kubenswrapper[4708]: W0203 07:12:46.460893 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0a2f3fb0_fc4d_49fe_a84f_9bfbc39280ea.slice/crio-d228cb06d5b5fe35aad81665eda5da27ccc0d3935d7d8a33818b4a00514105ea WatchSource:0}: Error finding container d228cb06d5b5fe35aad81665eda5da27ccc0d3935d7d8a33818b4a00514105ea: Status 404 returned error can't find the container with id d228cb06d5b5fe35aad81665eda5da27ccc0d3935d7d8a33818b4a00514105ea Feb 03 07:12:46 crc kubenswrapper[4708]: I0203 07:12:46.483639 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:46 crc kubenswrapper[4708]: E0203 07:12:46.483907 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:46.983881848 +0000 UTC m=+145.965828655 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:46 crc kubenswrapper[4708]: I0203 07:12:46.549990 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-748cf"] Feb 03 07:12:46 crc kubenswrapper[4708]: I0203 07:12:46.564292 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-v447t"] Feb 03 07:12:46 crc kubenswrapper[4708]: I0203 07:12:46.565038 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8xvkk"] Feb 03 07:12:46 crc kubenswrapper[4708]: I0203 07:12:46.579925 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:12:46 crc kubenswrapper[4708]: I0203 07:12:46.585441 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:46 crc kubenswrapper[4708]: E0203 07:12:46.585733 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:47.085720836 +0000 UTC m=+146.067667633 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:46 crc kubenswrapper[4708]: W0203 07:12:46.613622 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4facedc_7187_46c6_b930_27c347e61b05.slice/crio-3d8d1a9265b02d313c822fd549f76fb7f8ec1bc1aba196e8bd0eff04e6bd1fbc WatchSource:0}: Error finding container 3d8d1a9265b02d313c822fd549f76fb7f8ec1bc1aba196e8bd0eff04e6bd1fbc: Status 404 returned error can't find the container with id 3d8d1a9265b02d313c822fd549f76fb7f8ec1bc1aba196e8bd0eff04e6bd1fbc Feb 03 07:12:46 crc kubenswrapper[4708]: I0203 07:12:46.698494 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:46 crc kubenswrapper[4708]: E0203 07:12:46.698934 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:47.198918069 +0000 UTC m=+146.180864876 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:46 crc kubenswrapper[4708]: I0203 07:12:46.811290 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:46 crc kubenswrapper[4708]: E0203 07:12:46.811620 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:47.311608988 +0000 UTC m=+146.293555805 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:46 crc kubenswrapper[4708]: I0203 07:12:46.828899 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qwdnv"] Feb 03 07:12:46 crc kubenswrapper[4708]: I0203 07:12:46.831700 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-htktd"] Feb 03 07:12:46 crc kubenswrapper[4708]: I0203 07:12:46.832264 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-btjfc"] Feb 03 07:12:46 crc kubenswrapper[4708]: I0203 07:12:46.847185 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-p5qw9"] Feb 03 07:12:46 crc kubenswrapper[4708]: I0203 07:12:46.850110 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-jfpkc"] Feb 03 07:12:46 crc kubenswrapper[4708]: I0203 07:12:46.912037 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:46 crc kubenswrapper[4708]: E0203 07:12:46.912572 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:47.412548493 +0000 UTC m=+146.394495310 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:46 crc kubenswrapper[4708]: W0203 07:12:46.917411 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3764ba0a_05b2_442a_96ef_0534f4a1aca0.slice/crio-6946b5e77b48ba4fb6792aa8e7320054781439c40a0a29073d76b67d5946dd9c WatchSource:0}: Error finding container 6946b5e77b48ba4fb6792aa8e7320054781439c40a0a29073d76b67d5946dd9c: Status 404 returned error can't find the container with id 6946b5e77b48ba4fb6792aa8e7320054781439c40a0a29073d76b67d5946dd9c Feb 03 07:12:46 crc kubenswrapper[4708]: W0203 07:12:46.925882 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb81e6fc2_4131_40e0_994a_73435d2a4cbe.slice/crio-0f883a8860c4a88205fb9ae6b536289b4f70cfb50b5ad58d8b65b75edf2b7b2e WatchSource:0}: Error finding container 0f883a8860c4a88205fb9ae6b536289b4f70cfb50b5ad58d8b65b75edf2b7b2e: Status 404 returned error can't find the container with id 0f883a8860c4a88205fb9ae6b536289b4f70cfb50b5ad58d8b65b75edf2b7b2e Feb 03 07:12:46 crc kubenswrapper[4708]: W0203 07:12:46.949494 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1131acfa_a9db_4b5b_9f84_77f92597f69c.slice/crio-78c04264ab57bd4d71f2091281e1aefa9d768aef6c20dc6497dc2cc6684d3a4f WatchSource:0}: Error finding container 78c04264ab57bd4d71f2091281e1aefa9d768aef6c20dc6497dc2cc6684d3a4f: Status 404 returned error can't find the container with id 78c04264ab57bd4d71f2091281e1aefa9d768aef6c20dc6497dc2cc6684d3a4f Feb 03 07:12:46 crc kubenswrapper[4708]: I0203 07:12:46.996324 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-t2lvz"] Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.013812 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-2wrb6"] Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.014691 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:47 crc kubenswrapper[4708]: E0203 07:12:47.014960 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:47.514948685 +0000 UTC m=+146.496895492 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.022010 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j5xtp"] Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.023086 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8xpd7" event={"ID":"f52e0f35-b2d7-40ed-8e44-3c4408657eb0","Type":"ContainerStarted","Data":"3dbdefe406096967f4395203dd7ea6e8774147a4c2eafd20571ef3ed997a9bf1"} Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.024232 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mwrf4" event={"ID":"3a0d22e9-2f9a-4a91-85b8-7ad55bff4f46","Type":"ContainerStarted","Data":"14997e0105e641fda339800aa04be25cb5d4b9e8c5a3ba1607c8e43870b43f6a"} Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.032995 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dwg5b"] Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.033555 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mkbsf" event={"ID":"0a2f3fb0-fc4d-49fe-a84f-9bfbc39280ea","Type":"ContainerStarted","Data":"722340a22f3d6814db96e009e1dea2ac9f35901fe156ba0ced857c96daf43e84"} Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.033574 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mkbsf" event={"ID":"0a2f3fb0-fc4d-49fe-a84f-9bfbc39280ea","Type":"ContainerStarted","Data":"d228cb06d5b5fe35aad81665eda5da27ccc0d3935d7d8a33818b4a00514105ea"} Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.034312 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mkbsf" Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.037272 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-l76sw"] Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.038347 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-tdn7n" event={"ID":"283462c1-8f31-40aa-b570-96fff19ff3d0","Type":"ContainerStarted","Data":"24dc97f8bd6fdbbc5ebe3d4e760e98c55816900f95ce50482a546ed459aa7cf1"} Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.038832 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-tdn7n" Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.039006 4708 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-mkbsf container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.33:8443/healthz\": dial tcp 
10.217.0.33:8443: connect: connection refused" start-of-body= Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.039038 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mkbsf" podUID="0a2f3fb0-fc4d-49fe-a84f-9bfbc39280ea" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.33:8443/healthz\": dial tcp 10.217.0.33:8443: connect: connection refused" Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.040599 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-j5cvd" podStartSLOduration=123.040585659 podStartE2EDuration="2m3.040585659s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:47.022914415 +0000 UTC m=+146.004861222" watchObservedRunningTime="2026-02-03 07:12:47.040585659 +0000 UTC m=+146.022532466" Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.047904 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-xpdpr" podStartSLOduration=123.047889532 podStartE2EDuration="2m3.047889532s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:47.046363044 +0000 UTC m=+146.028309851" watchObservedRunningTime="2026-02-03 07:12:47.047889532 +0000 UTC m=+146.029836339" Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.093008 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-v447t" event={"ID":"f4facedc-7187-46c6-b930-27c347e61b05","Type":"ContainerStarted","Data":"3d8d1a9265b02d313c822fd549f76fb7f8ec1bc1aba196e8bd0eff04e6bd1fbc"} Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.105463 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-vzmxn" podStartSLOduration=124.105446568 podStartE2EDuration="2m4.105446568s" podCreationTimestamp="2026-02-03 07:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:47.076126461 +0000 UTC m=+146.058073278" watchObservedRunningTime="2026-02-03 07:12:47.105446568 +0000 UTC m=+146.087393375" Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.107110 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-zd8kn" podStartSLOduration=123.107104479 podStartE2EDuration="2m3.107104479s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:47.099220782 +0000 UTC m=+146.081167589" watchObservedRunningTime="2026-02-03 07:12:47.107104479 +0000 UTC m=+146.089051286" Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.110927 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xcbl9" event={"ID":"6d0028e2-24a1-479d-805d-9ac66cfdd68a","Type":"ContainerStarted","Data":"c9d6f819085a84f1dd6b1b443aec8fe8771e750d736ce967488a953d75f837ad"} Feb 03 07:12:47 crc 
kubenswrapper[4708]: I0203 07:12:47.114890 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ghh7f" event={"ID":"69843c19-ead2-4248-a016-c29c58ddddc3","Type":"ContainerStarted","Data":"d29ce45a0a8a2386a85b1f25dd7fa25d18bc094eaaab9bf8eb88775a638508c0"} Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.115832 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:47 crc kubenswrapper[4708]: E0203 07:12:47.118092 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:47.618069635 +0000 UTC m=+146.600016492 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.119661 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-c2mlc" event={"ID":"7ee20271-e2ce-4476-a011-5e00e19126bf","Type":"ContainerStarted","Data":"d1ea734d6c3837178dfc794ec090d763641da731fabbacac5c75dbf1d67ede7d"} Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.120337 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-c2mlc" Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.130098 4708 patch_prober.go:28] interesting pod/downloads-7954f5f757-c2mlc container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.22:8080/\": dial tcp 10.217.0.22:8080: connect: connection refused" start-of-body= Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.130151 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-c2mlc" podUID="7ee20271-e2ce-4476-a011-5e00e19126bf" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.22:8080/\": dial tcp 10.217.0.22:8080: connect: connection refused" Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.132177 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qwdnv" event={"ID":"3764ba0a-05b2-442a-96ef-0534f4a1aca0","Type":"ContainerStarted","Data":"6946b5e77b48ba4fb6792aa8e7320054781439c40a0a29073d76b67d5946dd9c"} Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.143287 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8xvkk" event={"ID":"5711cd35-18b6-4bbb-80a2-485ab5dbdda4","Type":"ContainerStarted","Data":"660a1fd359511e4d49016110d109b6af32af7a7035726a6074afe7cc7b65f56d"} Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.206460 4708 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-ds4j6" podStartSLOduration=123.206439064 podStartE2EDuration="2m3.206439064s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:47.15335177 +0000 UTC m=+146.135298577" watchObservedRunningTime="2026-02-03 07:12:47.206439064 +0000 UTC m=+146.188385871" Feb 03 07:12:47 crc kubenswrapper[4708]: W0203 07:12:47.216004 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1d8be75c_bac6_4a91_b9b9_19cf893ad193.slice/crio-2f7d19b399f2e70b2c2721199563c8abf0ebf12a00baec76e373069985bc8416 WatchSource:0}: Error finding container 2f7d19b399f2e70b2c2721199563c8abf0ebf12a00baec76e373069985bc8416: Status 404 returned error can't find the container with id 2f7d19b399f2e70b2c2721199563c8abf0ebf12a00baec76e373069985bc8416 Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.218646 4708 csr.go:261] certificate signing request csr-r5wr4 is approved, waiting to be issued Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.219254 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:47 crc kubenswrapper[4708]: E0203 07:12:47.219503 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:47.719494281 +0000 UTC m=+146.701441078 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.225198 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-mq55f" event={"ID":"d24b4086-5d15-4100-b580-9d9b69aa7602","Type":"ContainerStarted","Data":"6430caf09ed4a7d3f52f3a86db7d95f97283f9db7321d90793cc750717adab43"} Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.229190 4708 csr.go:257] certificate signing request csr-r5wr4 is issued Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.236119 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" podStartSLOduration=123.236098948 podStartE2EDuration="2m3.236098948s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:47.208122736 +0000 UTC m=+146.190069543" watchObservedRunningTime="2026-02-03 07:12:47.236098948 +0000 UTC m=+146.218045765" Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.255026 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b9npd" event={"ID":"7ec92dcf-aef5-49fd-9d97-ccf3c79decd8","Type":"ContainerStarted","Data":"5cf36d94217a32a3d2ed03f5139fbf619631443c06db3f7e77e4bfbf384a3634"} Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.263834 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-5tffj" event={"ID":"df8a6753-1332-4db6-b738-020474d60851","Type":"ContainerStarted","Data":"0e0b98b31b1cac25a476ea0e2c9f7aab950122676b3106bd496d58ab25611d5a"} Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.271392 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-9fjv2" podStartSLOduration=123.271370194 podStartE2EDuration="2m3.271370194s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:47.260491262 +0000 UTC m=+146.242438089" watchObservedRunningTime="2026-02-03 07:12:47.271370194 +0000 UTC m=+146.253317001" Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.274681 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-mts5h" event={"ID":"abf360a5-e982-4b3e-a814-511d57e9073f","Type":"ContainerStarted","Data":"9b7d38d386f26806d2d02fb5b9794e5438739bd01fd488cfc746c2b39aa711ad"} Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.288052 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-748cf" event={"ID":"987304c9-45fa-40ab-a687-528d1e8f69d3","Type":"ContainerStarted","Data":"4157c3ec9b21115051822dc58c5100e509523f86163695168100bbcfdb6b6e8d"} Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.296271 4708 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-gldbt" event={"ID":"2ad9206f-443a-4f4b-820f-f017581bb341","Type":"ContainerStarted","Data":"dd2536a4ab35869d52c9db7d4627210f052e59b4f242e09fca0fa81469029722"} Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.296314 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-gldbt" event={"ID":"2ad9206f-443a-4f4b-820f-f017581bb341","Type":"ContainerStarted","Data":"7d31e1672f68425e9ac2109e676f8c13424b7a8c21bfa23d59be87adf10c1323"} Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.305997 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-mq55f" podStartSLOduration=123.305979583 podStartE2EDuration="2m3.305979583s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:47.299189463 +0000 UTC m=+146.281136270" watchObservedRunningTime="2026-02-03 07:12:47.305979583 +0000 UTC m=+146.287926390" Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.308110 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-h9vhd" event={"ID":"15a6fec8-b852-4ad7-96ee-d69af750e84d","Type":"ContainerStarted","Data":"58c6211570572206c8b4ba8bf23cd96840a5dac3b110c35357882d85ea28097c"} Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.322272 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:47 crc kubenswrapper[4708]: E0203 07:12:47.323611 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:47.823590446 +0000 UTC m=+146.805537253 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.323999 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vtsww" event={"ID":"a1054514-9df0-4244-938c-9c6430b8183b","Type":"ContainerStarted","Data":"4b155ede5d96663450c6c9fc43490a97dbeb5eeb7a500e0336d95049be872877"} Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.351053 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-c2mlc" podStartSLOduration=123.351031095 podStartE2EDuration="2m3.351031095s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:47.348340997 +0000 UTC m=+146.330287804" watchObservedRunningTime="2026-02-03 07:12:47.351031095 +0000 UTC m=+146.332977902" Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.374719 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mkbsf" podStartSLOduration=123.374700419 podStartE2EDuration="2m3.374700419s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:47.370670798 +0000 UTC m=+146.352617605" watchObservedRunningTime="2026-02-03 07:12:47.374700419 +0000 UTC m=+146.356647226" Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.374742 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-p5qw9" event={"ID":"915701ce-919a-4743-b390-fa72105516e1","Type":"ContainerStarted","Data":"45c3fdf1a078d7bd6bd44aaed56810f1f1e4ed9813dbb8f3f030b6dee4d1548a"} Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.379567 4708 generic.go:334] "Generic (PLEG): container finished" podID="3bb818a6-b7dd-4e6a-b767-394bca081222" containerID="fbccdfa6b98f41c652dccf306f973d51ec4c288c62074760b6b5b5eb7837ca45" exitCode=0 Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.379615 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2" event={"ID":"3bb818a6-b7dd-4e6a-b767-394bca081222","Type":"ContainerDied","Data":"fbccdfa6b98f41c652dccf306f973d51ec4c288c62074760b6b5b5eb7837ca45"} Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.399726 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xvf6j" event={"ID":"12242d4a-0312-44a6-8283-9794875efda5","Type":"ContainerStarted","Data":"92bc5add2d2d8ad8482215925da183b37819a13af8c2be5ae10fa0fbb4499743"} Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.414550 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-jfpkc" 
event={"ID":"7b4e0874-30a8-4393-9864-cce370b40d8a","Type":"ContainerStarted","Data":"2993e419cfe39cc7581f9cd5cff26267c7ea1306181a88532db1f52c974b741e"} Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.416958 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-tdn7n" podStartSLOduration=123.41694399 podStartE2EDuration="2m3.41694399s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:47.414889529 +0000 UTC m=+146.396836336" watchObservedRunningTime="2026-02-03 07:12:47.41694399 +0000 UTC m=+146.398890797" Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.423553 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:47 crc kubenswrapper[4708]: E0203 07:12:47.426759 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:47.925145616 +0000 UTC m=+146.907092423 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.429488 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-htktd" event={"ID":"1131acfa-a9db-4b5b-9f84-77f92597f69c","Type":"ContainerStarted","Data":"78c04264ab57bd4d71f2091281e1aefa9d768aef6c20dc6497dc2cc6684d3a4f"} Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.436517 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-btjfc" event={"ID":"b81e6fc2-4131-40e0-994a-73435d2a4cbe","Type":"ContainerStarted","Data":"0f883a8860c4a88205fb9ae6b536289b4f70cfb50b5ad58d8b65b75edf2b7b2e"} Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.448587 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-w7xxc" event={"ID":"ead3a61c-4b09-4f98-866a-1e66ed92d084","Type":"ContainerStarted","Data":"020dacc38d0650a08e8015258bc13086af8fee2824476ebe6b8e3c907f21aed8"} Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.478641 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-ghh7f" podStartSLOduration=123.478628019 podStartE2EDuration="2m3.478628019s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:47.478322301 +0000 UTC m=+146.460269098" watchObservedRunningTime="2026-02-03 
07:12:47.478628019 +0000 UTC m=+146.460574836" Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.490152 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-zrxh2" event={"ID":"f8f529d7-932a-4047-b603-f84e03fe6898","Type":"ContainerStarted","Data":"57404727711dce010cedc6db32e7c0aa6a2212322698b4f8b96aa1db31198471"} Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.494589 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5c2p2" event={"ID":"f6237c57-eb31-40c5-8b6c-75a77a58ccdb","Type":"ContainerStarted","Data":"d192dd6de14e6daf34baeccc28c49e7f6912f11e6ba1ffe7e8148b203059863a"} Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.494640 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5c2p2" event={"ID":"f6237c57-eb31-40c5-8b6c-75a77a58ccdb","Type":"ContainerStarted","Data":"e555ebf28957642110f6fd8861adfcf01277d1e90887f6521234f179d1df08e6"} Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.508297 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-w7xxc" podStartSLOduration=124.508276604 podStartE2EDuration="2m4.508276604s" podCreationTimestamp="2026-02-03 07:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:47.503385541 +0000 UTC m=+146.485332348" watchObservedRunningTime="2026-02-03 07:12:47.508276604 +0000 UTC m=+146.490223411" Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.512771 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcrbn" event={"ID":"cb71375d-01dd-442b-ac48-a7f26ccde85d","Type":"ContainerStarted","Data":"42fb15b0f543c9219c0d3730f56cc762c16f5c1070a735b3422d45f043ac27ec"} Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.526062 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:47 crc kubenswrapper[4708]: E0203 07:12:47.527013 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:48.026998904 +0000 UTC m=+147.008945701 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.527980 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vtsww" podStartSLOduration=123.527969879 podStartE2EDuration="2m3.527969879s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:47.52563171 +0000 UTC m=+146.507578527" watchObservedRunningTime="2026-02-03 07:12:47.527969879 +0000 UTC m=+146.509916676" Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.529989 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-j5cvd" Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.551735 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-xpdpr" Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.616429 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-h9vhd" podStartSLOduration=5.6164129 podStartE2EDuration="5.6164129s" podCreationTimestamp="2026-02-03 07:12:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:47.615564988 +0000 UTC m=+146.597511795" watchObservedRunningTime="2026-02-03 07:12:47.6164129 +0000 UTC m=+146.598359707" Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.632643 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:47 crc kubenswrapper[4708]: E0203 07:12:47.645573 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:48.145562542 +0000 UTC m=+147.127509349 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.659906 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-gldbt" podStartSLOduration=123.659891291 podStartE2EDuration="2m3.659891291s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:47.659611535 +0000 UTC m=+146.641558342" watchObservedRunningTime="2026-02-03 07:12:47.659891291 +0000 UTC m=+146.641838098" Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.749035 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:47 crc kubenswrapper[4708]: E0203 07:12:47.749329 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:48.249306757 +0000 UTC m=+147.231253564 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.749564 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:47 crc kubenswrapper[4708]: E0203 07:12:47.749919 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:48.249911882 +0000 UTC m=+147.231858689 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.755706 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcrbn" podStartSLOduration=123.755692687 podStartE2EDuration="2m3.755692687s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:47.755291837 +0000 UTC m=+146.737238644" watchObservedRunningTime="2026-02-03 07:12:47.755692687 +0000 UTC m=+146.737639494" Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.757611 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-5tffj" podStartSLOduration=5.757603335 podStartE2EDuration="5.757603335s" podCreationTimestamp="2026-02-03 07:12:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:47.719521789 +0000 UTC m=+146.701468596" watchObservedRunningTime="2026-02-03 07:12:47.757603335 +0000 UTC m=+146.739550142" Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.850448 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:47 crc kubenswrapper[4708]: E0203 07:12:47.858993 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:48.358965161 +0000 UTC m=+147.340911968 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.913128 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-zrxh2" podStartSLOduration=123.91310305 podStartE2EDuration="2m3.91310305s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:47.905191131 +0000 UTC m=+146.887137938" watchObservedRunningTime="2026-02-03 07:12:47.91310305 +0000 UTC m=+146.895049857" Feb 03 07:12:47 crc kubenswrapper[4708]: I0203 07:12:47.960264 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:47 crc kubenswrapper[4708]: E0203 07:12:47.960588 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:48.460577063 +0000 UTC m=+147.442523870 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.061413 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:48 crc kubenswrapper[4708]: E0203 07:12:48.063840 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:48.563784694 +0000 UTC m=+147.545731501 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.121190 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-zrxh2" Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.122787 4708 patch_prober.go:28] interesting pod/router-default-5444994796-zrxh2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 03 07:12:48 crc kubenswrapper[4708]: [-]has-synced failed: reason withheld Feb 03 07:12:48 crc kubenswrapper[4708]: [+]process-running ok Feb 03 07:12:48 crc kubenswrapper[4708]: healthz check failed Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.122878 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zrxh2" podUID="f8f529d7-932a-4047-b603-f84e03fe6898" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.164676 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:48 crc kubenswrapper[4708]: E0203 07:12:48.165126 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:48.665108929 +0000 UTC m=+147.647055736 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.230086 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-02-03 07:07:47 +0000 UTC, rotation deadline is 2026-12-20 06:06:51.612257999 +0000 UTC Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.230329 4708 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 7678h54m3.381933903s for next certificate rotation Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.277075 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:48 crc kubenswrapper[4708]: E0203 07:12:48.277459 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:48.77744488 +0000 UTC m=+147.759391687 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.378658 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:48 crc kubenswrapper[4708]: E0203 07:12:48.379057 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:48.879035242 +0000 UTC m=+147.860982109 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.479287 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:48 crc kubenswrapper[4708]: E0203 07:12:48.479453 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:48.979427752 +0000 UTC m=+147.961374559 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.479753 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:48 crc kubenswrapper[4708]: E0203 07:12:48.480058 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:48.980048628 +0000 UTC m=+147.961995435 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.540661 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-w7xxc" event={"ID":"ead3a61c-4b09-4f98-866a-1e66ed92d084","Type":"ContainerStarted","Data":"942332b768abde049c122f3277e1c2843ed1c90c11c281184c9c75022ffb48e5"} Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.561046 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5c2p2" event={"ID":"f6237c57-eb31-40c5-8b6c-75a77a58ccdb","Type":"ContainerStarted","Data":"d3fb235131db965a4f5a5ffc9fe45e2a27b7c605a24e656707c9c64f550d0045"} Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.572485 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mwrf4" event={"ID":"3a0d22e9-2f9a-4a91-85b8-7ad55bff4f46","Type":"ContainerStarted","Data":"ffaecfa484837a4b5bc7f70965af208307a2c501dd23ec8ef59450359c02c133"} Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.580324 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:48 crc kubenswrapper[4708]: E0203 07:12:48.580712 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:49.080696806 +0000 UTC m=+148.062643613 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.592907 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5c2p2" podStartSLOduration=124.592891871 podStartE2EDuration="2m4.592891871s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:48.592446441 +0000 UTC m=+147.574393248" watchObservedRunningTime="2026-02-03 07:12:48.592891871 +0000 UTC m=+147.574838678" Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.596083 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-t2lvz" event={"ID":"00c9d661-6c2e-48e7-9747-1476d52290a8","Type":"ContainerStarted","Data":"35b4a48788c3c3dd5793edefe2c4ba7cd2190bf7cce8ddd35de0b6910fdb9c1b"} Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.596126 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-t2lvz" event={"ID":"00c9d661-6c2e-48e7-9747-1476d52290a8","Type":"ContainerStarted","Data":"c220d7283f48711217527650d279712c517a49b852d36354d878448ae936d5c4"} Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.596851 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-t2lvz" Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.598837 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dwg5b" event={"ID":"7d731670-5f15-4d66-92bc-1e4d71645b91","Type":"ContainerStarted","Data":"8bc7491165e957deb6f7832e2956bf985f95fac8d9cd998b36eac0f1acbe7351"} Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.598869 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dwg5b" event={"ID":"7d731670-5f15-4d66-92bc-1e4d71645b91","Type":"ContainerStarted","Data":"f2833954aab327235f79e79a4b01fd341e927a63e07c56818b82b22f98dad2f7"} Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.598879 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dwg5b" event={"ID":"7d731670-5f15-4d66-92bc-1e4d71645b91","Type":"ContainerStarted","Data":"f2de20bb0437b541b505607c97c2ab627c0c236146a444e81678517ad3c2eb3a"} Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.599306 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dwg5b" Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.613804 4708 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-t2lvz container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" start-of-body= Feb 03 
07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.613863 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-t2lvz" podUID="00c9d661-6c2e-48e7-9747-1476d52290a8" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.617407 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-2wrb6" event={"ID":"07cf9cc6-a9ee-462a-811b-51051d221a8a","Type":"ContainerStarted","Data":"69b4b5c71bf27b824f9cb108b827b65877b11227d4bed222cd3c5979f16f7325"} Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.617450 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-2wrb6" event={"ID":"07cf9cc6-a9ee-462a-811b-51051d221a8a","Type":"ContainerStarted","Data":"458e9c14d0f7c2482bfce6d9850252101ef27d7b0ddf9688cf996370b65654e8"} Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.617948 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mwrf4" podStartSLOduration=124.61793184 podStartE2EDuration="2m4.61793184s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:48.615400207 +0000 UTC m=+147.597347014" watchObservedRunningTime="2026-02-03 07:12:48.61793184 +0000 UTC m=+147.599878647" Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.628984 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8xpd7" event={"ID":"f52e0f35-b2d7-40ed-8e44-3c4408657eb0","Type":"ContainerStarted","Data":"47b0ba5ab5dd99c576a51a623ba5980058546c7d1fd5c9fdd0afcfeda9755a51"} Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.641478 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8xvkk" event={"ID":"5711cd35-18b6-4bbb-80a2-485ab5dbdda4","Type":"ContainerStarted","Data":"f34596e7883006cdb4b44376718bd21213eb7e134961083086a49d8d85ab8c62"} Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.666125 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-t2lvz" podStartSLOduration=124.666111021 podStartE2EDuration="2m4.666111021s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:48.665058064 +0000 UTC m=+147.647004871" watchObservedRunningTime="2026-02-03 07:12:48.666111021 +0000 UTC m=+147.648057828" Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.673978 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-mts5h" event={"ID":"abf360a5-e982-4b3e-a814-511d57e9073f","Type":"ContainerStarted","Data":"22fb499d4a486859b23020885034a62db3da70357c6f89730f45a1dbae2b211f"} Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.683343 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:48 crc kubenswrapper[4708]: E0203 07:12:48.684435 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:49.184422991 +0000 UTC m=+148.166369798 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.717927 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-p5qw9" event={"ID":"915701ce-919a-4743-b390-fa72105516e1","Type":"ContainerStarted","Data":"e8da7592275ed197ed0dfeec501b8f62e4dbaf4d31f68d6b38209a8074330f7b"} Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.719043 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dwg5b" podStartSLOduration=124.719022629 podStartE2EDuration="2m4.719022629s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:48.718819904 +0000 UTC m=+147.700766711" watchObservedRunningTime="2026-02-03 07:12:48.719022629 +0000 UTC m=+147.700969436" Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.780589 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-htktd" event={"ID":"1131acfa-a9db-4b5b-9f84-77f92597f69c","Type":"ContainerStarted","Data":"31a48901c116599084c9c867d68f4d2b791ae52c7b353c4b5fbdab9d55976632"} Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.784965 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:48 crc kubenswrapper[4708]: E0203 07:12:48.786704 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:49.286681779 +0000 UTC m=+148.268628586 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.812613 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-5tffj" event={"ID":"df8a6753-1332-4db6-b738-020474d60851","Type":"ContainerStarted","Data":"6dfaf429e7077b7b7c491fd3987000a7cd02121fc4d0230648de09cb0bb631b8"} Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.823627 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qwdnv" event={"ID":"3764ba0a-05b2-442a-96ef-0534f4a1aca0","Type":"ContainerStarted","Data":"17cbd8e7e3fa2ef2283ad3c811e59b922176076ba8cfc50b7c840cc2914acbab"} Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.824466 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qwdnv" Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.826336 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j5xtp" event={"ID":"1d8be75c-bac6-4a91-b9b9-19cf893ad193","Type":"ContainerStarted","Data":"fe91a07f34831ad83d5d5a035dc4a08a693ad40e297ab99765f1116d00aa6515"} Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.826359 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j5xtp" event={"ID":"1d8be75c-bac6-4a91-b9b9-19cf893ad193","Type":"ContainerStarted","Data":"2f7d19b399f2e70b2c2721199563c8abf0ebf12a00baec76e373069985bc8416"} Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.828354 4708 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-qwdnv container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.41:8443/healthz\": dial tcp 10.217.0.41:8443: connect: connection refused" start-of-body= Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.828388 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qwdnv" podUID="3764ba0a-05b2-442a-96ef-0534f4a1aca0" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.41:8443/healthz\": dial tcp 10.217.0.41:8443: connect: connection refused" Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.845386 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8xvkk" podStartSLOduration=124.845369473 podStartE2EDuration="2m4.845369473s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:48.763804753 +0000 UTC m=+147.745751560" watchObservedRunningTime="2026-02-03 07:12:48.845369473 +0000 UTC m=+147.827316280" Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.855249 4708 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-748cf" event={"ID":"987304c9-45fa-40ab-a687-528d1e8f69d3","Type":"ContainerStarted","Data":"aceb0cc7013d2b30eb0f12195fa4123ea7c605f8068f8f6dd0dcf34d35ff5ec8"} Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.856096 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-748cf" Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.856939 4708 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-748cf container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.18:8443/healthz\": dial tcp 10.217.0.18:8443: connect: connection refused" start-of-body= Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.856984 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-748cf" podUID="987304c9-45fa-40ab-a687-528d1e8f69d3" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.18:8443/healthz\": dial tcp 10.217.0.18:8443: connect: connection refused" Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.882368 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-jfpkc" event={"ID":"7b4e0874-30a8-4393-9864-cce370b40d8a","Type":"ContainerStarted","Data":"216f039b4eb1e77003eb123de221d352e2cf79c2d5b42c658c1f3428c18b4d44"} Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.883807 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-p5qw9" podStartSLOduration=124.883778757 podStartE2EDuration="2m4.883778757s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:48.846195653 +0000 UTC m=+147.828142460" watchObservedRunningTime="2026-02-03 07:12:48.883778757 +0000 UTC m=+147.865725554" Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.884984 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-2wrb6" podStartSLOduration=124.884978367 podStartE2EDuration="2m4.884978367s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:48.882661648 +0000 UTC m=+147.864608455" watchObservedRunningTime="2026-02-03 07:12:48.884978367 +0000 UTC m=+147.866925174" Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.886926 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:48 crc kubenswrapper[4708]: E0203 07:12:48.889733 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2026-02-03 07:12:49.389720026 +0000 UTC m=+148.371666833 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.904283 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-l76sw" event={"ID":"ed81cda7-7d47-476a-8a51-029a15af8417","Type":"ContainerStarted","Data":"9996974aa03e97ca85f0c1c6e5eefc7b24f0d6e9597bd1dd18edb8ad86713f27"} Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.904329 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-l76sw" event={"ID":"ed81cda7-7d47-476a-8a51-029a15af8417","Type":"ContainerStarted","Data":"7d41f36b542fad67a7cd66d67b5f6af079c148f5a076505967e05488ab55e201"} Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.941269 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xcbl9" event={"ID":"6d0028e2-24a1-479d-805d-9ac66cfdd68a","Type":"ContainerStarted","Data":"b93aa214f2de7c58380b8eee0017330bc789a7aecbef91a4f70f398deefcbf5a"} Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.942600 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8xpd7" podStartSLOduration=124.942585604 podStartE2EDuration="2m4.942585604s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:48.940518322 +0000 UTC m=+147.922465129" watchObservedRunningTime="2026-02-03 07:12:48.942585604 +0000 UTC m=+147.924532401" Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.957453 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-v447t" event={"ID":"f4facedc-7187-46c6-b930-27c347e61b05","Type":"ContainerStarted","Data":"ead3a59f13ff13d58f2d0e2632cff8dbbd2d8d2ff74e50a1cdf1e85d18ce6013"} Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.958375 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-v447t" Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.987704 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.987823 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b9npd" event={"ID":"7ec92dcf-aef5-49fd-9d97-ccf3c79decd8","Type":"ContainerStarted","Data":"c35f683a67e3f530b4ffc7087ea0079d19343f7cebf3e4569f801b4a015e9f39"} Feb 03 07:12:48 crc 
kubenswrapper[4708]: E0203 07:12:48.988605 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:49.488590139 +0000 UTC m=+148.470536946 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:48 crc kubenswrapper[4708]: I0203 07:12:48.988711 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-mts5h" podStartSLOduration=124.988697512 podStartE2EDuration="2m4.988697512s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:48.987119842 +0000 UTC m=+147.969066649" watchObservedRunningTime="2026-02-03 07:12:48.988697512 +0000 UTC m=+147.970644319" Feb 03 07:12:49 crc kubenswrapper[4708]: I0203 07:12:49.034928 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xvf6j" event={"ID":"12242d4a-0312-44a6-8283-9794875efda5","Type":"ContainerStarted","Data":"d85feeb385e26025c52f7910711518a8032894b51c0468214cdfc3252b1281ac"} Feb 03 07:12:49 crc kubenswrapper[4708]: I0203 07:12:49.034966 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xvf6j" event={"ID":"12242d4a-0312-44a6-8283-9794875efda5","Type":"ContainerStarted","Data":"1a0dbf2ab58224b25d71e595f0b4c5ae4ffedbd26258b30bc0a8b036b72c0ee0"} Feb 03 07:12:49 crc kubenswrapper[4708]: I0203 07:12:49.050929 4708 patch_prober.go:28] interesting pod/downloads-7954f5f757-c2mlc container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.22:8080/\": dial tcp 10.217.0.22:8080: connect: connection refused" start-of-body= Feb 03 07:12:49 crc kubenswrapper[4708]: I0203 07:12:49.050988 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-c2mlc" podUID="7ee20271-e2ce-4476-a011-5e00e19126bf" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.22:8080/\": dial tcp 10.217.0.22:8080: connect: connection refused" Feb 03 07:12:49 crc kubenswrapper[4708]: I0203 07:12:49.061124 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-mkbsf" Feb 03 07:12:49 crc kubenswrapper[4708]: I0203 07:12:49.076893 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b9npd" podStartSLOduration=125.076876516 podStartE2EDuration="2m5.076876516s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:49.075253895 +0000 UTC m=+148.057200692" 
watchObservedRunningTime="2026-02-03 07:12:49.076876516 +0000 UTC m=+148.058823323" Feb 03 07:12:49 crc kubenswrapper[4708]: E0203 07:12:49.093233 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:49.593222207 +0000 UTC m=+148.575169014 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:49 crc kubenswrapper[4708]: I0203 07:12:49.093467 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:49 crc kubenswrapper[4708]: I0203 07:12:49.134699 4708 patch_prober.go:28] interesting pod/router-default-5444994796-zrxh2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 03 07:12:49 crc kubenswrapper[4708]: [-]has-synced failed: reason withheld Feb 03 07:12:49 crc kubenswrapper[4708]: [+]process-running ok Feb 03 07:12:49 crc kubenswrapper[4708]: healthz check failed Feb 03 07:12:49 crc kubenswrapper[4708]: I0203 07:12:49.134748 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zrxh2" podUID="f8f529d7-932a-4047-b603-f84e03fe6898" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 03 07:12:49 crc kubenswrapper[4708]: I0203 07:12:49.171592 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-xcbl9" podStartSLOduration=125.171574495 podStartE2EDuration="2m5.171574495s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:49.096776686 +0000 UTC m=+148.078723493" watchObservedRunningTime="2026-02-03 07:12:49.171574495 +0000 UTC m=+148.153521312" Feb 03 07:12:49 crc kubenswrapper[4708]: I0203 07:12:49.173263 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-v447t" podStartSLOduration=125.173257017 podStartE2EDuration="2m5.173257017s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:49.171433491 +0000 UTC m=+148.153380298" watchObservedRunningTime="2026-02-03 07:12:49.173257017 +0000 UTC m=+148.155203824" Feb 03 07:12:49 crc kubenswrapper[4708]: I0203 07:12:49.232325 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:49 crc kubenswrapper[4708]: E0203 07:12:49.233054 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:49.733036708 +0000 UTC m=+148.714983515 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:49 crc kubenswrapper[4708]: I0203 07:12:49.282315 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-748cf" podStartSLOduration=125.282296365 podStartE2EDuration="2m5.282296365s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:49.24308864 +0000 UTC m=+148.225035447" watchObservedRunningTime="2026-02-03 07:12:49.282296365 +0000 UTC m=+148.264243172" Feb 03 07:12:49 crc kubenswrapper[4708]: I0203 07:12:49.313359 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-l76sw" podStartSLOduration=125.313344175 podStartE2EDuration="2m5.313344175s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:49.28209644 +0000 UTC m=+148.264043247" watchObservedRunningTime="2026-02-03 07:12:49.313344175 +0000 UTC m=+148.295290982" Feb 03 07:12:49 crc kubenswrapper[4708]: I0203 07:12:49.313446 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qwdnv" podStartSLOduration=125.313443657 podStartE2EDuration="2m5.313443657s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:49.312530234 +0000 UTC m=+148.294477041" watchObservedRunningTime="2026-02-03 07:12:49.313443657 +0000 UTC m=+148.295390454" Feb 03 07:12:49 crc kubenswrapper[4708]: I0203 07:12:49.333542 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:49 crc kubenswrapper[4708]: E0203 07:12:49.334003 4708 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:49.833983163 +0000 UTC m=+148.815930020 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:49 crc kubenswrapper[4708]: I0203 07:12:49.411650 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j5xtp" podStartSLOduration=125.411632093 podStartE2EDuration="2m5.411632093s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:49.397961299 +0000 UTC m=+148.379908106" watchObservedRunningTime="2026-02-03 07:12:49.411632093 +0000 UTC m=+148.393578890" Feb 03 07:12:49 crc kubenswrapper[4708]: I0203 07:12:49.436455 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:49 crc kubenswrapper[4708]: E0203 07:12:49.437077 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:49.937060941 +0000 UTC m=+148.919007738 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:49 crc kubenswrapper[4708]: I0203 07:12:49.487181 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-tdn7n" Feb 03 07:12:49 crc kubenswrapper[4708]: I0203 07:12:49.529905 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xvf6j" podStartSLOduration=125.529887583 podStartE2EDuration="2m5.529887583s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:49.469000203 +0000 UTC m=+148.450947010" watchObservedRunningTime="2026-02-03 07:12:49.529887583 +0000 UTC m=+148.511834400" Feb 03 07:12:49 crc kubenswrapper[4708]: I0203 07:12:49.539431 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:49 crc kubenswrapper[4708]: E0203 07:12:49.539727 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:50.03971668 +0000 UTC m=+149.021663477 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:49 crc kubenswrapper[4708]: I0203 07:12:49.641684 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:49 crc kubenswrapper[4708]: E0203 07:12:49.642044 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:50.142026899 +0000 UTC m=+149.123973706 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:49 crc kubenswrapper[4708]: I0203 07:12:49.742918 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:49 crc kubenswrapper[4708]: E0203 07:12:49.743520 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:50.243509037 +0000 UTC m=+149.225455844 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:49 crc kubenswrapper[4708]: I0203 07:12:49.844322 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:49 crc kubenswrapper[4708]: E0203 07:12:49.844705 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:50.344688638 +0000 UTC m=+149.326635445 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:49 crc kubenswrapper[4708]: I0203 07:12:49.945966 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:49 crc kubenswrapper[4708]: E0203 07:12:49.946245 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:50.446232918 +0000 UTC m=+149.428179725 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:49 crc kubenswrapper[4708]: I0203 07:12:49.958585 4708 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-v447t container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.15:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Feb 03 07:12:49 crc kubenswrapper[4708]: I0203 07:12:49.958656 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-v447t" podUID="f4facedc-7187-46c6-b930-27c347e61b05" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.15:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.039450 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-htktd" event={"ID":"1131acfa-a9db-4b5b-9f84-77f92597f69c","Type":"ContainerStarted","Data":"ea33abf121934f3ade68ec90fa19e827e4af3f9ed1c236c79358d2cea60d2ab9"} Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.039582 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-htktd" Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.041926 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-qftz5" event={"ID":"c4d3118a-28d4-403b-95c3-f2a11c14846d","Type":"ContainerStarted","Data":"f3cc5d02822dba88cc0ca345fc67b0838cb2d32ca94bf6f1a24714e13e8b5a01"} Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.041965 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-qftz5" 
event={"ID":"c4d3118a-28d4-403b-95c3-f2a11c14846d","Type":"ContainerStarted","Data":"4e418a164e076a78d317d8029b21bda523be1cbfe42d391f4a2a9366c1ad5f0d"} Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.044060 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-jfpkc" event={"ID":"7b4e0874-30a8-4393-9864-cce370b40d8a","Type":"ContainerStarted","Data":"44d6ee8a38730fb4f83118d1884dc896c1ce585fa6f25543a1d1bd9425593057"} Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.046216 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-btjfc" event={"ID":"b81e6fc2-4131-40e0-994a-73435d2a4cbe","Type":"ContainerStarted","Data":"584c3a7ca627a260a165799726653a0d99d036df641342b2d26b5fdce984de0b"} Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.046393 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:50 crc kubenswrapper[4708]: E0203 07:12:50.046573 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:50.546534207 +0000 UTC m=+149.528481024 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.046632 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:50 crc kubenswrapper[4708]: E0203 07:12:50.047032 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:50.547018029 +0000 UTC m=+149.528964886 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.047992 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2" event={"ID":"3bb818a6-b7dd-4e6a-b767-394bca081222","Type":"ContainerStarted","Data":"2a07dc84e02350dee0b40e6d05d26943993a73df71643e29278315797b003036"} Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.049268 4708 generic.go:334] "Generic (PLEG): container finished" podID="ead3a61c-4b09-4f98-866a-1e66ed92d084" containerID="942332b768abde049c122f3277e1c2843ed1c90c11c281184c9c75022ffb48e5" exitCode=0 Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.049403 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-w7xxc" event={"ID":"ead3a61c-4b09-4f98-866a-1e66ed92d084","Type":"ContainerDied","Data":"942332b768abde049c122f3277e1c2843ed1c90c11c281184c9c75022ffb48e5"} Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.050896 4708 patch_prober.go:28] interesting pod/downloads-7954f5f757-c2mlc container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.22:8080/\": dial tcp 10.217.0.22:8080: connect: connection refused" start-of-body= Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.050960 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-c2mlc" podUID="7ee20271-e2ce-4476-a011-5e00e19126bf" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.22:8080/\": dial tcp 10.217.0.22:8080: connect: connection refused" Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.051950 4708 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-t2lvz container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" start-of-body= Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.052011 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-t2lvz" podUID="00c9d661-6c2e-48e7-9747-1476d52290a8" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.058519 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-748cf" Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.063208 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qwdnv" Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.106731 4708 patch_prober.go:28] interesting pod/router-default-5444994796-zrxh2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" 
start-of-body=[-]backend-http failed: reason withheld Feb 03 07:12:50 crc kubenswrapper[4708]: [-]has-synced failed: reason withheld Feb 03 07:12:50 crc kubenswrapper[4708]: [+]process-running ok Feb 03 07:12:50 crc kubenswrapper[4708]: healthz check failed Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.106815 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zrxh2" podUID="f8f529d7-932a-4047-b603-f84e03fe6898" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.137928 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-htktd" podStartSLOduration=8.137912782 podStartE2EDuration="8.137912782s" podCreationTimestamp="2026-02-03 07:12:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:50.089225219 +0000 UTC m=+149.071172026" watchObservedRunningTime="2026-02-03 07:12:50.137912782 +0000 UTC m=+149.119859589" Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.147312 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:50 crc kubenswrapper[4708]: E0203 07:12:50.147471 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:50.647449562 +0000 UTC m=+149.629396369 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.149362 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:50 crc kubenswrapper[4708]: E0203 07:12:50.149774 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:50.649767459 +0000 UTC m=+149.631714266 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.225752 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-jfpkc" podStartSLOduration=126.225735008 podStartE2EDuration="2m6.225735008s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:50.141920843 +0000 UTC m=+149.123867660" watchObservedRunningTime="2026-02-03 07:12:50.225735008 +0000 UTC m=+149.207681815" Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.253803 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:50 crc kubenswrapper[4708]: E0203 07:12:50.254150 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:50.754134051 +0000 UTC m=+149.736080858 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.271896 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2" podStartSLOduration=126.271881286 podStartE2EDuration="2m6.271881286s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:50.269932437 +0000 UTC m=+149.251879244" watchObservedRunningTime="2026-02-03 07:12:50.271881286 +0000 UTC m=+149.253828093" Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.355086 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:50 crc kubenswrapper[4708]: E0203 07:12:50.355479 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:50.855467195 +0000 UTC m=+149.837414002 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.360207 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-qftz5" podStartSLOduration=126.360191004 podStartE2EDuration="2m6.360191004s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:50.355597158 +0000 UTC m=+149.337543965" watchObservedRunningTime="2026-02-03 07:12:50.360191004 +0000 UTC m=+149.342137811" Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.456063 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:50 crc kubenswrapper[4708]: E0203 07:12:50.456262 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:50.956245896 +0000 UTC m=+149.938192703 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.456330 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:50 crc kubenswrapper[4708]: E0203 07:12:50.456565 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:50.956559544 +0000 UTC m=+149.938506351 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.541042 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-v447t" Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.556920 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:50 crc kubenswrapper[4708]: E0203 07:12:50.557266 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:51.057242052 +0000 UTC m=+150.039188859 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.557432 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:50 crc kubenswrapper[4708]: E0203 07:12:50.557808 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:51.057786096 +0000 UTC m=+150.039732903 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.659129 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:50 crc kubenswrapper[4708]: E0203 07:12:50.659653 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:51.159633333 +0000 UTC m=+150.141580140 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.760579 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:50 crc kubenswrapper[4708]: E0203 07:12:50.760863 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:51.260849886 +0000 UTC m=+150.242796693 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.862612 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:50 crc kubenswrapper[4708]: E0203 07:12:50.862916 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:51.362899639 +0000 UTC m=+150.344846446 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:50 crc kubenswrapper[4708]: I0203 07:12:50.964369 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:50 crc kubenswrapper[4708]: E0203 07:12:50.964684 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:51.464674314 +0000 UTC m=+150.446621111 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.055625 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-btjfc" event={"ID":"b81e6fc2-4131-40e0-994a-73435d2a4cbe","Type":"ContainerStarted","Data":"ac4c7d44110bddfbb88fcb8abf345c519f31df3ea265fc24e853efa1a8c292aa"} Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.055678 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-btjfc" event={"ID":"b81e6fc2-4131-40e0-994a-73435d2a4cbe","Type":"ContainerStarted","Data":"7794c15a88211fc5abd226f702c5753515c5516012dfeb4b857482d16e32b3b8"} Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.056126 4708 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-t2lvz container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" start-of-body= Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.056173 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-t2lvz" podUID="00c9d661-6c2e-48e7-9747-1476d52290a8" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.064962 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:51 crc kubenswrapper[4708]: E0203 07:12:51.065126 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:51.565100486 +0000 UTC m=+150.547047293 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.065243 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:51 crc kubenswrapper[4708]: E0203 07:12:51.065587 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:51.565579788 +0000 UTC m=+150.547526595 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.101172 4708 patch_prober.go:28] interesting pod/router-default-5444994796-zrxh2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 03 07:12:51 crc kubenswrapper[4708]: [-]has-synced failed: reason withheld Feb 03 07:12:51 crc kubenswrapper[4708]: [+]process-running ok Feb 03 07:12:51 crc kubenswrapper[4708]: healthz check failed Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.101328 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zrxh2" podUID="f8f529d7-932a-4047-b603-f84e03fe6898" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.166827 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:51 crc kubenswrapper[4708]: E0203 07:12:51.177353 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:51.677329244 +0000 UTC m=+150.659276051 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.269096 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:51 crc kubenswrapper[4708]: E0203 07:12:51.269483 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:51.769467888 +0000 UTC m=+150.751414695 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.340391 4708 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.367807 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-w7xxc" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.373742 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:51 crc kubenswrapper[4708]: E0203 07:12:51.373931 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:51.873908232 +0000 UTC m=+150.855855029 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.374062 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:51 crc kubenswrapper[4708]: E0203 07:12:51.374390 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:51.874381673 +0000 UTC m=+150.856328650 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.475065 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ead3a61c-4b09-4f98-866a-1e66ed92d084-secret-volume\") pod \"ead3a61c-4b09-4f98-866a-1e66ed92d084\" (UID: \"ead3a61c-4b09-4f98-866a-1e66ed92d084\") " Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.475135 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p6fqj\" (UniqueName: \"kubernetes.io/projected/ead3a61c-4b09-4f98-866a-1e66ed92d084-kube-api-access-p6fqj\") pod \"ead3a61c-4b09-4f98-866a-1e66ed92d084\" (UID: \"ead3a61c-4b09-4f98-866a-1e66ed92d084\") " Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.475228 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ead3a61c-4b09-4f98-866a-1e66ed92d084-config-volume\") pod \"ead3a61c-4b09-4f98-866a-1e66ed92d084\" (UID: \"ead3a61c-4b09-4f98-866a-1e66ed92d084\") " Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.475330 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:51 crc kubenswrapper[4708]: E0203 07:12:51.475528 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-02-03 07:12:51.975508723 +0000 UTC m=+150.957455530 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.475605 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:51 crc kubenswrapper[4708]: E0203 07:12:51.475924 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:51.975915793 +0000 UTC m=+150.957862600 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.476168 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ead3a61c-4b09-4f98-866a-1e66ed92d084-config-volume" (OuterVolumeSpecName: "config-volume") pod "ead3a61c-4b09-4f98-866a-1e66ed92d084" (UID: "ead3a61c-4b09-4f98-866a-1e66ed92d084"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.482565 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ead3a61c-4b09-4f98-866a-1e66ed92d084-kube-api-access-p6fqj" (OuterVolumeSpecName: "kube-api-access-p6fqj") pod "ead3a61c-4b09-4f98-866a-1e66ed92d084" (UID: "ead3a61c-4b09-4f98-866a-1e66ed92d084"). InnerVolumeSpecName "kube-api-access-p6fqj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.483811 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gtjj2"] Feb 03 07:12:51 crc kubenswrapper[4708]: E0203 07:12:51.484001 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ead3a61c-4b09-4f98-866a-1e66ed92d084" containerName="collect-profiles" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.484012 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="ead3a61c-4b09-4f98-866a-1e66ed92d084" containerName="collect-profiles" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.484111 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="ead3a61c-4b09-4f98-866a-1e66ed92d084" containerName="collect-profiles" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.484733 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gtjj2" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.491519 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.495387 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ead3a61c-4b09-4f98-866a-1e66ed92d084-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ead3a61c-4b09-4f98-866a-1e66ed92d084" (UID: "ead3a61c-4b09-4f98-866a-1e66ed92d084"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.500014 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gtjj2"] Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.576311 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:51 crc kubenswrapper[4708]: E0203 07:12:51.576522 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:52.076492488 +0000 UTC m=+151.058439295 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.576577 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nv5lx\" (UniqueName: \"kubernetes.io/projected/267fbe93-1af6-4a87-9720-c9d5cae93c91-kube-api-access-nv5lx\") pod \"community-operators-gtjj2\" (UID: \"267fbe93-1af6-4a87-9720-c9d5cae93c91\") " pod="openshift-marketplace/community-operators-gtjj2" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.576648 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.576678 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/267fbe93-1af6-4a87-9720-c9d5cae93c91-catalog-content\") pod \"community-operators-gtjj2\" (UID: \"267fbe93-1af6-4a87-9720-c9d5cae93c91\") " pod="openshift-marketplace/community-operators-gtjj2" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.576765 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/267fbe93-1af6-4a87-9720-c9d5cae93c91-utilities\") pod \"community-operators-gtjj2\" (UID: \"267fbe93-1af6-4a87-9720-c9d5cae93c91\") " pod="openshift-marketplace/community-operators-gtjj2" Feb 03 07:12:51 crc kubenswrapper[4708]: E0203 07:12:51.577077 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:52.077065143 +0000 UTC m=+151.059012020 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.577122 4708 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ead3a61c-4b09-4f98-866a-1e66ed92d084-config-volume\") on node \"crc\" DevicePath \"\"" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.577150 4708 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ead3a61c-4b09-4f98-866a-1e66ed92d084-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.577161 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p6fqj\" (UniqueName: \"kubernetes.io/projected/ead3a61c-4b09-4f98-866a-1e66ed92d084-kube-api-access-p6fqj\") on node \"crc\" DevicePath \"\"" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.678404 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:51 crc kubenswrapper[4708]: E0203 07:12:51.678551 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:52.178526941 +0000 UTC m=+151.160473748 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.678596 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nv5lx\" (UniqueName: \"kubernetes.io/projected/267fbe93-1af6-4a87-9720-c9d5cae93c91-kube-api-access-nv5lx\") pod \"community-operators-gtjj2\" (UID: \"267fbe93-1af6-4a87-9720-c9d5cae93c91\") " pod="openshift-marketplace/community-operators-gtjj2" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.678624 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.678642 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/267fbe93-1af6-4a87-9720-c9d5cae93c91-catalog-content\") pod \"community-operators-gtjj2\" (UID: \"267fbe93-1af6-4a87-9720-c9d5cae93c91\") " pod="openshift-marketplace/community-operators-gtjj2" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.678672 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/267fbe93-1af6-4a87-9720-c9d5cae93c91-utilities\") pod \"community-operators-gtjj2\" (UID: \"267fbe93-1af6-4a87-9720-c9d5cae93c91\") " pod="openshift-marketplace/community-operators-gtjj2" Feb 03 07:12:51 crc kubenswrapper[4708]: E0203 07:12:51.678972 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:52.178961342 +0000 UTC m=+151.160908149 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.679143 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/267fbe93-1af6-4a87-9720-c9d5cae93c91-utilities\") pod \"community-operators-gtjj2\" (UID: \"267fbe93-1af6-4a87-9720-c9d5cae93c91\") " pod="openshift-marketplace/community-operators-gtjj2" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.679244 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/267fbe93-1af6-4a87-9720-c9d5cae93c91-catalog-content\") pod \"community-operators-gtjj2\" (UID: \"267fbe93-1af6-4a87-9720-c9d5cae93c91\") " pod="openshift-marketplace/community-operators-gtjj2" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.691497 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-7d8rt"] Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.692701 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7d8rt" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.694502 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.700618 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nv5lx\" (UniqueName: \"kubernetes.io/projected/267fbe93-1af6-4a87-9720-c9d5cae93c91-kube-api-access-nv5lx\") pod \"community-operators-gtjj2\" (UID: \"267fbe93-1af6-4a87-9720-c9d5cae93c91\") " pod="openshift-marketplace/community-operators-gtjj2" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.703166 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7d8rt"] Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.779707 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:51 crc kubenswrapper[4708]: E0203 07:12:51.779886 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:52.279858815 +0000 UTC m=+151.261805622 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.779948 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.780013 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.780067 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:12:51 crc kubenswrapper[4708]: E0203 07:12:51.780458 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:52.28041975 +0000 UTC m=+151.262366547 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.781028 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.782723 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.806210 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gtjj2" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.878675 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-pxgjb"] Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.880138 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pxgjb" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.880668 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:51 crc kubenswrapper[4708]: E0203 07:12:51.880843 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:52.380823132 +0000 UTC m=+151.362769939 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.880993 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.881032 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxj5g\" (UniqueName: \"kubernetes.io/projected/22892077-113b-4859-81cb-9ec0e6fc60ea-kube-api-access-mxj5g\") pod \"certified-operators-7d8rt\" (UID: \"22892077-113b-4859-81cb-9ec0e6fc60ea\") " pod="openshift-marketplace/certified-operators-7d8rt" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.881060 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.881086 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22892077-113b-4859-81cb-9ec0e6fc60ea-catalog-content\") pod \"certified-operators-7d8rt\" (UID: \"22892077-113b-4859-81cb-9ec0e6fc60ea\") " pod="openshift-marketplace/certified-operators-7d8rt" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.881114 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22892077-113b-4859-81cb-9ec0e6fc60ea-utilities\") pod \"certified-operators-7d8rt\" (UID: \"22892077-113b-4859-81cb-9ec0e6fc60ea\") " pod="openshift-marketplace/certified-operators-7d8rt" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.881216 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:51 crc kubenswrapper[4708]: E0203 07:12:51.881630 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:52.381612021 +0000 UTC m=+151.363558828 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.885715 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.891557 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pxgjb"] Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.894622 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.912052 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.982453 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:51 crc kubenswrapper[4708]: E0203 07:12:51.982584 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:52.482554956 +0000 UTC m=+151.464501763 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.982703 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-669sk\" (UniqueName: \"kubernetes.io/projected/d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1-kube-api-access-669sk\") pod \"community-operators-pxgjb\" (UID: \"d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1\") " pod="openshift-marketplace/community-operators-pxgjb" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.982746 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxj5g\" (UniqueName: \"kubernetes.io/projected/22892077-113b-4859-81cb-9ec0e6fc60ea-kube-api-access-mxj5g\") pod \"certified-operators-7d8rt\" (UID: \"22892077-113b-4859-81cb-9ec0e6fc60ea\") " pod="openshift-marketplace/certified-operators-7d8rt" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.982778 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1-utilities\") pod \"community-operators-pxgjb\" (UID: \"d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1\") " pod="openshift-marketplace/community-operators-pxgjb" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.982828 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22892077-113b-4859-81cb-9ec0e6fc60ea-catalog-content\") pod \"certified-operators-7d8rt\" (UID: \"22892077-113b-4859-81cb-9ec0e6fc60ea\") " pod="openshift-marketplace/certified-operators-7d8rt" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.982865 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22892077-113b-4859-81cb-9ec0e6fc60ea-utilities\") pod \"certified-operators-7d8rt\" (UID: \"22892077-113b-4859-81cb-9ec0e6fc60ea\") " pod="openshift-marketplace/certified-operators-7d8rt" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.982904 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1-catalog-content\") pod \"community-operators-pxgjb\" (UID: \"d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1\") " pod="openshift-marketplace/community-operators-pxgjb" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.982936 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.991392 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/22892077-113b-4859-81cb-9ec0e6fc60ea-catalog-content\") pod \"certified-operators-7d8rt\" (UID: \"22892077-113b-4859-81cb-9ec0e6fc60ea\") " pod="openshift-marketplace/certified-operators-7d8rt" Feb 03 07:12:51 crc kubenswrapper[4708]: I0203 07:12:51.992143 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22892077-113b-4859-81cb-9ec0e6fc60ea-utilities\") pod \"certified-operators-7d8rt\" (UID: \"22892077-113b-4859-81cb-9ec0e6fc60ea\") " pod="openshift-marketplace/certified-operators-7d8rt" Feb 03 07:12:51 crc kubenswrapper[4708]: E0203 07:12:51.992574 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:52.492528476 +0000 UTC m=+151.474475303 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.016480 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.023384 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.025071 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mxj5g\" (UniqueName: \"kubernetes.io/projected/22892077-113b-4859-81cb-9ec0e6fc60ea-kube-api-access-mxj5g\") pod \"certified-operators-7d8rt\" (UID: \"22892077-113b-4859-81cb-9ec0e6fc60ea\") " pod="openshift-marketplace/certified-operators-7d8rt" Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.070489 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gtjj2"] Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.089108 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5ldx7"] Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.096216 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.096498 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-669sk\" (UniqueName: \"kubernetes.io/projected/d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1-kube-api-access-669sk\") pod \"community-operators-pxgjb\" (UID: \"d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1\") " pod="openshift-marketplace/community-operators-pxgjb" Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.096540 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1-utilities\") pod \"community-operators-pxgjb\" (UID: \"d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1\") " pod="openshift-marketplace/community-operators-pxgjb" Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.096632 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1-catalog-content\") pod \"community-operators-pxgjb\" (UID: \"d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1\") " pod="openshift-marketplace/community-operators-pxgjb" Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.097156 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1-catalog-content\") pod \"community-operators-pxgjb\" (UID: \"d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1\") " pod="openshift-marketplace/community-operators-pxgjb" Feb 03 07:12:52 crc kubenswrapper[4708]: E0203 07:12:52.097242 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-03 07:12:52.597224366 +0000 UTC m=+151.579171173 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.097711 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1-utilities\") pod \"community-operators-pxgjb\" (UID: \"d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1\") " pod="openshift-marketplace/community-operators-pxgjb" Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.102990 4708 patch_prober.go:28] interesting pod/router-default-5444994796-zrxh2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 03 07:12:52 crc kubenswrapper[4708]: [-]has-synced failed: reason withheld Feb 03 07:12:52 crc kubenswrapper[4708]: [+]process-running ok Feb 03 07:12:52 crc kubenswrapper[4708]: healthz check failed Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.103042 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zrxh2" podUID="f8f529d7-932a-4047-b603-f84e03fe6898" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.104749 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5ldx7" Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.125064 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-669sk\" (UniqueName: \"kubernetes.io/projected/d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1-kube-api-access-669sk\") pod \"community-operators-pxgjb\" (UID: \"d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1\") " pod="openshift-marketplace/community-operators-pxgjb" Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.133323 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5ldx7"] Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.143055 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-btjfc" event={"ID":"b81e6fc2-4131-40e0-994a-73435d2a4cbe","Type":"ContainerStarted","Data":"63a5259dc9da771bf1e65407443a85c288085d7b9759593e56294c19517d917d"} Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.167377 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-w7xxc" Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.167494 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501700-w7xxc" event={"ID":"ead3a61c-4b09-4f98-866a-1e66ed92d084","Type":"ContainerDied","Data":"020dacc38d0650a08e8015258bc13086af8fee2824476ebe6b8e3c907f21aed8"} Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.167541 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="020dacc38d0650a08e8015258bc13086af8fee2824476ebe6b8e3c907f21aed8" Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.178056 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-btjfc" podStartSLOduration=10.178032675 podStartE2EDuration="10.178032675s" podCreationTimestamp="2026-02-03 07:12:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:52.16746588 +0000 UTC m=+151.149412707" watchObservedRunningTime="2026-02-03 07:12:52.178032675 +0000 UTC m=+151.159979482" Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.200691 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.200783 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/def51730-4952-42ef-9bc1-b04ed753075c-utilities\") pod \"certified-operators-5ldx7\" (UID: \"def51730-4952-42ef-9bc1-b04ed753075c\") " pod="openshift-marketplace/certified-operators-5ldx7" Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.200854 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8fh9s\" (UniqueName: \"kubernetes.io/projected/def51730-4952-42ef-9bc1-b04ed753075c-kube-api-access-8fh9s\") pod \"certified-operators-5ldx7\" (UID: 
\"def51730-4952-42ef-9bc1-b04ed753075c\") " pod="openshift-marketplace/certified-operators-5ldx7" Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.200904 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/def51730-4952-42ef-9bc1-b04ed753075c-catalog-content\") pod \"certified-operators-5ldx7\" (UID: \"def51730-4952-42ef-9bc1-b04ed753075c\") " pod="openshift-marketplace/certified-operators-5ldx7" Feb 03 07:12:52 crc kubenswrapper[4708]: E0203 07:12:52.201682 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-03 07:12:52.701670819 +0000 UTC m=+151.683617626 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nqhrk" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.222029 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pxgjb" Feb 03 07:12:52 crc kubenswrapper[4708]: W0203 07:12:52.226946 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-38023638c6261f31222a24991e0e75f316a0d839e89f3523997faad2d1e0c1ab WatchSource:0}: Error finding container 38023638c6261f31222a24991e0e75f316a0d839e89f3523997faad2d1e0c1ab: Status 404 returned error can't find the container with id 38023638c6261f31222a24991e0e75f316a0d839e89f3523997faad2d1e0c1ab Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.281948 4708 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-02-03T07:12:51.340418871Z","Handler":null,"Name":""} Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.288835 4708 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.288872 4708 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.301334 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.301595 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/def51730-4952-42ef-9bc1-b04ed753075c-utilities\") pod \"certified-operators-5ldx7\" (UID: 
\"def51730-4952-42ef-9bc1-b04ed753075c\") " pod="openshift-marketplace/certified-operators-5ldx7" Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.301642 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8fh9s\" (UniqueName: \"kubernetes.io/projected/def51730-4952-42ef-9bc1-b04ed753075c-kube-api-access-8fh9s\") pod \"certified-operators-5ldx7\" (UID: \"def51730-4952-42ef-9bc1-b04ed753075c\") " pod="openshift-marketplace/certified-operators-5ldx7" Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.301672 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/def51730-4952-42ef-9bc1-b04ed753075c-catalog-content\") pod \"certified-operators-5ldx7\" (UID: \"def51730-4952-42ef-9bc1-b04ed753075c\") " pod="openshift-marketplace/certified-operators-5ldx7" Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.302107 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/def51730-4952-42ef-9bc1-b04ed753075c-catalog-content\") pod \"certified-operators-5ldx7\" (UID: \"def51730-4952-42ef-9bc1-b04ed753075c\") " pod="openshift-marketplace/certified-operators-5ldx7" Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.302325 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/def51730-4952-42ef-9bc1-b04ed753075c-utilities\") pod \"certified-operators-5ldx7\" (UID: \"def51730-4952-42ef-9bc1-b04ed753075c\") " pod="openshift-marketplace/certified-operators-5ldx7" Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.311267 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7d8rt" Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.311283 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.318257 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8fh9s\" (UniqueName: \"kubernetes.io/projected/def51730-4952-42ef-9bc1-b04ed753075c-kube-api-access-8fh9s\") pod \"certified-operators-5ldx7\" (UID: \"def51730-4952-42ef-9bc1-b04ed753075c\") " pod="openshift-marketplace/certified-operators-5ldx7" Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.404975 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.410485 4708 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.410514 4708 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:52 crc kubenswrapper[4708]: W0203 07:12:52.425249 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-bfdf32a4da8383897101da708679914c4967034568b1d33dd975e7e49b02131d WatchSource:0}: Error finding container bfdf32a4da8383897101da708679914c4967034568b1d33dd975e7e49b02131d: Status 404 returned error can't find the container with id bfdf32a4da8383897101da708679914c4967034568b1d33dd975e7e49b02131d Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.432267 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5ldx7" Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.452867 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nqhrk\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.496563 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pxgjb"] Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.561972 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.620348 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7d8rt"] Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.710478 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5ldx7"] Feb 03 07:12:52 crc kubenswrapper[4708]: W0203 07:12:52.746253 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddef51730_4952_42ef_9bc1_b04ed753075c.slice/crio-40354e1aa916a2e967d73c174096771fb11d309ba93e878b7db7d12fd1510bc6 WatchSource:0}: Error finding container 40354e1aa916a2e967d73c174096771fb11d309ba93e878b7db7d12fd1510bc6: Status 404 returned error can't find the container with id 40354e1aa916a2e967d73c174096771fb11d309ba93e878b7db7d12fd1510bc6 Feb 03 07:12:52 crc kubenswrapper[4708]: I0203 07:12:52.817023 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-nqhrk"] Feb 03 07:12:52 crc kubenswrapper[4708]: W0203 07:12:52.831335 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc35d1e2c_9135_4bff_a0af_cd20addc6134.slice/crio-7fde67b9a9ce4ffe7e2b80bae2791ba5dc7f9f04a407768d34dc26aabfaf969c WatchSource:0}: Error finding container 7fde67b9a9ce4ffe7e2b80bae2791ba5dc7f9f04a407768d34dc26aabfaf969c: Status 404 returned error can't find the container with id 7fde67b9a9ce4ffe7e2b80bae2791ba5dc7f9f04a407768d34dc26aabfaf969c Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.101528 4708 patch_prober.go:28] interesting pod/router-default-5444994796-zrxh2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 03 07:12:53 crc kubenswrapper[4708]: [-]has-synced failed: reason withheld Feb 03 07:12:53 crc kubenswrapper[4708]: [+]process-running ok Feb 03 07:12:53 crc kubenswrapper[4708]: healthz check failed Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.101850 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zrxh2" podUID="f8f529d7-932a-4047-b603-f84e03fe6898" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.174404 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" event={"ID":"c35d1e2c-9135-4bff-a0af-cd20addc6134","Type":"ContainerStarted","Data":"9cfd7f3c07fd39c9a20807db1f88553420a914207625a35139033785648be6d4"} Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.174445 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" event={"ID":"c35d1e2c-9135-4bff-a0af-cd20addc6134","Type":"ContainerStarted","Data":"7fde67b9a9ce4ffe7e2b80bae2791ba5dc7f9f04a407768d34dc26aabfaf969c"} Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.174535 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.175957 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"652e464ca21493027f6a0fcc30790f3fa68b33dd9e71392c3767e179b5652ca4"} Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.176005 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"38023638c6261f31222a24991e0e75f316a0d839e89f3523997faad2d1e0c1ab"} Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.178783 4708 generic.go:334] "Generic (PLEG): container finished" podID="22892077-113b-4859-81cb-9ec0e6fc60ea" containerID="3b88c4bef80cfbefafa4f7fbf783eb555d5c5121853cac161ac28d018af06eac" exitCode=0 Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.178876 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7d8rt" event={"ID":"22892077-113b-4859-81cb-9ec0e6fc60ea","Type":"ContainerDied","Data":"3b88c4bef80cfbefafa4f7fbf783eb555d5c5121853cac161ac28d018af06eac"} Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.178902 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7d8rt" event={"ID":"22892077-113b-4859-81cb-9ec0e6fc60ea","Type":"ContainerStarted","Data":"4e4094e6680d2a414e6b78744be9ab130396e5ed0a7bf501af71fed5d8b6b448"} Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.181229 4708 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.182384 4708 generic.go:334] "Generic (PLEG): container finished" podID="def51730-4952-42ef-9bc1-b04ed753075c" containerID="b10eadca574db351dbba1f3f8d61802ab44491334b87bb7aea1c732baa29698f" exitCode=0 Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.182590 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5ldx7" event={"ID":"def51730-4952-42ef-9bc1-b04ed753075c","Type":"ContainerDied","Data":"b10eadca574db351dbba1f3f8d61802ab44491334b87bb7aea1c732baa29698f"} Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.182627 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5ldx7" event={"ID":"def51730-4952-42ef-9bc1-b04ed753075c","Type":"ContainerStarted","Data":"40354e1aa916a2e967d73c174096771fb11d309ba93e878b7db7d12fd1510bc6"} Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.187346 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"578f3403b45774ea1675f8049f5761cfb2f2fea9b9b37fd6b19cb35454f80486"} Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.187387 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"bfdf32a4da8383897101da708679914c4967034568b1d33dd975e7e49b02131d"} Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.189809 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" 
event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"99aa3cee99680daba67f7000fb829ba6ad9f367c77154c62ffbc8958a5934e29"} Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.189856 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"d3a84298d92ce4461d45ee9a24a310d8c8da1dbfc46b32bb9564cbd640e1ec2b"} Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.190022 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.192328 4708 generic.go:334] "Generic (PLEG): container finished" podID="d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1" containerID="0e7318122a46c22658bcec6d5637d24348d22c5c84423394663468c16a6d9a01" exitCode=0 Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.192429 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pxgjb" event={"ID":"d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1","Type":"ContainerDied","Data":"0e7318122a46c22658bcec6d5637d24348d22c5c84423394663468c16a6d9a01"} Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.192486 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pxgjb" event={"ID":"d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1","Type":"ContainerStarted","Data":"80576ba1a409469c602067b39fa85e6cb209ec4380acae81a5ded0717c8748f3"} Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.194352 4708 generic.go:334] "Generic (PLEG): container finished" podID="267fbe93-1af6-4a87-9720-c9d5cae93c91" containerID="3911ede37c860b7be7c1cdc16810d956eaf691e31b020226a8f82d5c4846fdf8" exitCode=0 Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.195350 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gtjj2" event={"ID":"267fbe93-1af6-4a87-9720-c9d5cae93c91","Type":"ContainerDied","Data":"3911ede37c860b7be7c1cdc16810d956eaf691e31b020226a8f82d5c4846fdf8"} Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.195385 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gtjj2" event={"ID":"267fbe93-1af6-4a87-9720-c9d5cae93c91","Type":"ContainerStarted","Data":"6514529b62994a92590906716277dd9bb50f1f5e61a29b56a7189e6c5906e1de"} Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.198014 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.200466 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" podStartSLOduration=129.200444641 podStartE2EDuration="2m9.200444641s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:12:53.198067491 +0000 UTC m=+152.180014308" watchObservedRunningTime="2026-02-03 07:12:53.200444641 +0000 UTC m=+152.182391448" Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.680384 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ww8kc"] Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.681492 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ww8kc" Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.683578 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.753385 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ww8kc"] Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.821009 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dvtst\" (UniqueName: \"kubernetes.io/projected/70bd64d0-0ea0-4c56-9e7f-fc150343c834-kube-api-access-dvtst\") pod \"redhat-marketplace-ww8kc\" (UID: \"70bd64d0-0ea0-4c56-9e7f-fc150343c834\") " pod="openshift-marketplace/redhat-marketplace-ww8kc" Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.821140 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70bd64d0-0ea0-4c56-9e7f-fc150343c834-catalog-content\") pod \"redhat-marketplace-ww8kc\" (UID: \"70bd64d0-0ea0-4c56-9e7f-fc150343c834\") " pod="openshift-marketplace/redhat-marketplace-ww8kc" Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.821189 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70bd64d0-0ea0-4c56-9e7f-fc150343c834-utilities\") pod \"redhat-marketplace-ww8kc\" (UID: \"70bd64d0-0ea0-4c56-9e7f-fc150343c834\") " pod="openshift-marketplace/redhat-marketplace-ww8kc" Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.833728 4708 patch_prober.go:28] interesting pod/machine-config-daemon-r94bn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.833851 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.922622 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dvtst\" (UniqueName: \"kubernetes.io/projected/70bd64d0-0ea0-4c56-9e7f-fc150343c834-kube-api-access-dvtst\") pod \"redhat-marketplace-ww8kc\" (UID: \"70bd64d0-0ea0-4c56-9e7f-fc150343c834\") " pod="openshift-marketplace/redhat-marketplace-ww8kc" Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.922674 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70bd64d0-0ea0-4c56-9e7f-fc150343c834-catalog-content\") pod \"redhat-marketplace-ww8kc\" (UID: \"70bd64d0-0ea0-4c56-9e7f-fc150343c834\") " pod="openshift-marketplace/redhat-marketplace-ww8kc" Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.922696 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70bd64d0-0ea0-4c56-9e7f-fc150343c834-utilities\") pod \"redhat-marketplace-ww8kc\" (UID: 
\"70bd64d0-0ea0-4c56-9e7f-fc150343c834\") " pod="openshift-marketplace/redhat-marketplace-ww8kc" Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.923182 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70bd64d0-0ea0-4c56-9e7f-fc150343c834-utilities\") pod \"redhat-marketplace-ww8kc\" (UID: \"70bd64d0-0ea0-4c56-9e7f-fc150343c834\") " pod="openshift-marketplace/redhat-marketplace-ww8kc" Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.923502 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70bd64d0-0ea0-4c56-9e7f-fc150343c834-catalog-content\") pod \"redhat-marketplace-ww8kc\" (UID: \"70bd64d0-0ea0-4c56-9e7f-fc150343c834\") " pod="openshift-marketplace/redhat-marketplace-ww8kc" Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.943990 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dvtst\" (UniqueName: \"kubernetes.io/projected/70bd64d0-0ea0-4c56-9e7f-fc150343c834-kube-api-access-dvtst\") pod \"redhat-marketplace-ww8kc\" (UID: \"70bd64d0-0ea0-4c56-9e7f-fc150343c834\") " pod="openshift-marketplace/redhat-marketplace-ww8kc" Feb 03 07:12:53 crc kubenswrapper[4708]: I0203 07:12:53.995183 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ww8kc" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.083826 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ggnln"] Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.084757 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ggnln" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.101521 4708 patch_prober.go:28] interesting pod/router-default-5444994796-zrxh2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 03 07:12:54 crc kubenswrapper[4708]: [-]has-synced failed: reason withheld Feb 03 07:12:54 crc kubenswrapper[4708]: [+]process-running ok Feb 03 07:12:54 crc kubenswrapper[4708]: healthz check failed Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.101579 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zrxh2" podUID="f8f529d7-932a-4047-b603-f84e03fe6898" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.107059 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.107551 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ggnln"] Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.227724 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebc6ef57-be3f-448d-9acc-45a042d16383-utilities\") pod \"redhat-marketplace-ggnln\" (UID: \"ebc6ef57-be3f-448d-9acc-45a042d16383\") " pod="openshift-marketplace/redhat-marketplace-ggnln" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.228091 4708 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebc6ef57-be3f-448d-9acc-45a042d16383-catalog-content\") pod \"redhat-marketplace-ggnln\" (UID: \"ebc6ef57-be3f-448d-9acc-45a042d16383\") " pod="openshift-marketplace/redhat-marketplace-ggnln" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.228132 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vzh4\" (UniqueName: \"kubernetes.io/projected/ebc6ef57-be3f-448d-9acc-45a042d16383-kube-api-access-5vzh4\") pod \"redhat-marketplace-ggnln\" (UID: \"ebc6ef57-be3f-448d-9acc-45a042d16383\") " pod="openshift-marketplace/redhat-marketplace-ggnln" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.329144 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebc6ef57-be3f-448d-9acc-45a042d16383-catalog-content\") pod \"redhat-marketplace-ggnln\" (UID: \"ebc6ef57-be3f-448d-9acc-45a042d16383\") " pod="openshift-marketplace/redhat-marketplace-ggnln" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.329215 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vzh4\" (UniqueName: \"kubernetes.io/projected/ebc6ef57-be3f-448d-9acc-45a042d16383-kube-api-access-5vzh4\") pod \"redhat-marketplace-ggnln\" (UID: \"ebc6ef57-be3f-448d-9acc-45a042d16383\") " pod="openshift-marketplace/redhat-marketplace-ggnln" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.329319 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebc6ef57-be3f-448d-9acc-45a042d16383-utilities\") pod \"redhat-marketplace-ggnln\" (UID: \"ebc6ef57-be3f-448d-9acc-45a042d16383\") " pod="openshift-marketplace/redhat-marketplace-ggnln" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.330316 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebc6ef57-be3f-448d-9acc-45a042d16383-catalog-content\") pod \"redhat-marketplace-ggnln\" (UID: \"ebc6ef57-be3f-448d-9acc-45a042d16383\") " pod="openshift-marketplace/redhat-marketplace-ggnln" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.330824 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebc6ef57-be3f-448d-9acc-45a042d16383-utilities\") pod \"redhat-marketplace-ggnln\" (UID: \"ebc6ef57-be3f-448d-9acc-45a042d16383\") " pod="openshift-marketplace/redhat-marketplace-ggnln" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.354028 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vzh4\" (UniqueName: \"kubernetes.io/projected/ebc6ef57-be3f-448d-9acc-45a042d16383-kube-api-access-5vzh4\") pod \"redhat-marketplace-ggnln\" (UID: \"ebc6ef57-be3f-448d-9acc-45a042d16383\") " pod="openshift-marketplace/redhat-marketplace-ggnln" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.401717 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ggnln" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.424044 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-zd8kn" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.424096 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-zd8kn" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.426249 4708 patch_prober.go:28] interesting pod/console-f9d7485db-zd8kn container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.25:8443/health\": dial tcp 10.217.0.25:8443: connect: connection refused" start-of-body= Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.426285 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-zd8kn" podUID="46ee42c1-592d-47c3-85ba-ead60edf7aca" containerName="console" probeResult="failure" output="Get \"https://10.217.0.25:8443/health\": dial tcp 10.217.0.25:8443: connect: connection refused" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.499761 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ww8kc"] Feb 03 07:12:54 crc kubenswrapper[4708]: W0203 07:12:54.510828 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod70bd64d0_0ea0_4c56_9e7f_fc150343c834.slice/crio-86cf98deef8ce6035410020574858cfbd2e1f1c6287e36e04b5125f2d9a07808 WatchSource:0}: Error finding container 86cf98deef8ce6035410020574858cfbd2e1f1c6287e36e04b5125f2d9a07808: Status 404 returned error can't find the container with id 86cf98deef8ce6035410020574858cfbd2e1f1c6287e36e04b5125f2d9a07808 Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.578497 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.579404 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.579547 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.581422 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.581588 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.583532 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.583576 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.589350 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.671346 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.671666 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.676307 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-kclhl"] Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.682023 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ggnln"] Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.682162 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-kclhl" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.688059 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.688709 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kclhl"] Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.688784 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Feb 03 07:12:54 crc kubenswrapper[4708]: W0203 07:12:54.695043 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podebc6ef57_be3f_448d_9acc_45a042d16383.slice/crio-dab0ca3bfc652947cb3d34f85a57fba16339b6ba1ff4e5d6be5c2281178a94e3 WatchSource:0}: Error finding container dab0ca3bfc652947cb3d34f85a57fba16339b6ba1ff4e5d6be5c2281178a94e3: Status 404 returned error can't find the container with id dab0ca3bfc652947cb3d34f85a57fba16339b6ba1ff4e5d6be5c2281178a94e3 Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.724765 4708 patch_prober.go:28] interesting pod/downloads-7954f5f757-c2mlc container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.22:8080/\": dial tcp 10.217.0.22:8080: connect: connection refused" start-of-body= Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.724827 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-c2mlc" podUID="7ee20271-e2ce-4476-a011-5e00e19126bf" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.22:8080/\": dial tcp 10.217.0.22:8080: connect: connection refused" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.725166 4708 patch_prober.go:28] interesting pod/downloads-7954f5f757-c2mlc container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.22:8080/\": dial tcp 10.217.0.22:8080: connect: connection refused" start-of-body= Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.725190 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-c2mlc" podUID="7ee20271-e2ce-4476-a011-5e00e19126bf" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.22:8080/\": dial tcp 10.217.0.22:8080: connect: connection refused" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.736130 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/193735a9-a305-4494-9ce5-60cc62ac2768-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"193735a9-a305-4494-9ce5-60cc62ac2768\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.736257 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/193735a9-a305-4494-9ce5-60cc62ac2768-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"193735a9-a305-4494-9ce5-60cc62ac2768\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.837245 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/193735a9-a305-4494-9ce5-60cc62ac2768-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"193735a9-a305-4494-9ce5-60cc62ac2768\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.837285 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42939d12-477f-4186-9d74-1b62ca36d039-utilities\") pod \"redhat-operators-kclhl\" (UID: \"42939d12-477f-4186-9d74-1b62ca36d039\") " pod="openshift-marketplace/redhat-operators-kclhl" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.837365 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42939d12-477f-4186-9d74-1b62ca36d039-catalog-content\") pod \"redhat-operators-kclhl\" (UID: \"42939d12-477f-4186-9d74-1b62ca36d039\") " pod="openshift-marketplace/redhat-operators-kclhl" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.837405 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tkbmm\" (UniqueName: \"kubernetes.io/projected/42939d12-477f-4186-9d74-1b62ca36d039-kube-api-access-tkbmm\") pod \"redhat-operators-kclhl\" (UID: \"42939d12-477f-4186-9d74-1b62ca36d039\") " pod="openshift-marketplace/redhat-operators-kclhl" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.837427 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/193735a9-a305-4494-9ce5-60cc62ac2768-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"193735a9-a305-4494-9ce5-60cc62ac2768\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.837895 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/193735a9-a305-4494-9ce5-60cc62ac2768-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"193735a9-a305-4494-9ce5-60cc62ac2768\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.854910 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/193735a9-a305-4494-9ce5-60cc62ac2768-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"193735a9-a305-4494-9ce5-60cc62ac2768\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.938682 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42939d12-477f-4186-9d74-1b62ca36d039-utilities\") pod \"redhat-operators-kclhl\" (UID: \"42939d12-477f-4186-9d74-1b62ca36d039\") " pod="openshift-marketplace/redhat-operators-kclhl" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.938760 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42939d12-477f-4186-9d74-1b62ca36d039-catalog-content\") pod \"redhat-operators-kclhl\" (UID: \"42939d12-477f-4186-9d74-1b62ca36d039\") " pod="openshift-marketplace/redhat-operators-kclhl" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.938786 4708 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-tkbmm\" (UniqueName: \"kubernetes.io/projected/42939d12-477f-4186-9d74-1b62ca36d039-kube-api-access-tkbmm\") pod \"redhat-operators-kclhl\" (UID: \"42939d12-477f-4186-9d74-1b62ca36d039\") " pod="openshift-marketplace/redhat-operators-kclhl" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.939465 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42939d12-477f-4186-9d74-1b62ca36d039-utilities\") pod \"redhat-operators-kclhl\" (UID: \"42939d12-477f-4186-9d74-1b62ca36d039\") " pod="openshift-marketplace/redhat-operators-kclhl" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.939681 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42939d12-477f-4186-9d74-1b62ca36d039-catalog-content\") pod \"redhat-operators-kclhl\" (UID: \"42939d12-477f-4186-9d74-1b62ca36d039\") " pod="openshift-marketplace/redhat-operators-kclhl" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.954377 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tkbmm\" (UniqueName: \"kubernetes.io/projected/42939d12-477f-4186-9d74-1b62ca36d039-kube-api-access-tkbmm\") pod \"redhat-operators-kclhl\" (UID: \"42939d12-477f-4186-9d74-1b62ca36d039\") " pod="openshift-marketplace/redhat-operators-kclhl" Feb 03 07:12:54 crc kubenswrapper[4708]: I0203 07:12:54.958025 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 03 07:12:55 crc kubenswrapper[4708]: I0203 07:12:55.001142 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kclhl" Feb 03 07:12:55 crc kubenswrapper[4708]: I0203 07:12:55.078555 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-s52xw"] Feb 03 07:12:55 crc kubenswrapper[4708]: I0203 07:12:55.079859 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-s52xw" Feb 03 07:12:55 crc kubenswrapper[4708]: I0203 07:12:55.094026 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-s52xw"] Feb 03 07:12:55 crc kubenswrapper[4708]: I0203 07:12:55.101214 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-zrxh2" Feb 03 07:12:55 crc kubenswrapper[4708]: I0203 07:12:55.107546 4708 patch_prober.go:28] interesting pod/router-default-5444994796-zrxh2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 03 07:12:55 crc kubenswrapper[4708]: [-]has-synced failed: reason withheld Feb 03 07:12:55 crc kubenswrapper[4708]: [+]process-running ok Feb 03 07:12:55 crc kubenswrapper[4708]: healthz check failed Feb 03 07:12:55 crc kubenswrapper[4708]: I0203 07:12:55.107696 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zrxh2" podUID="f8f529d7-932a-4047-b603-f84e03fe6898" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 03 07:12:55 crc kubenswrapper[4708]: I0203 07:12:55.187051 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Feb 03 07:12:55 crc kubenswrapper[4708]: I0203 07:12:55.222430 4708 generic.go:334] "Generic (PLEG): container finished" podID="ebc6ef57-be3f-448d-9acc-45a042d16383" containerID="a42ff6eb35b10bab7cc18ec24c7c8ef0007f07568a0a7e6b0aa64e052aae6e49" exitCode=0 Feb 03 07:12:55 crc kubenswrapper[4708]: I0203 07:12:55.222530 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ggnln" event={"ID":"ebc6ef57-be3f-448d-9acc-45a042d16383","Type":"ContainerDied","Data":"a42ff6eb35b10bab7cc18ec24c7c8ef0007f07568a0a7e6b0aa64e052aae6e49"} Feb 03 07:12:55 crc kubenswrapper[4708]: I0203 07:12:55.222556 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ggnln" event={"ID":"ebc6ef57-be3f-448d-9acc-45a042d16383","Type":"ContainerStarted","Data":"dab0ca3bfc652947cb3d34f85a57fba16339b6ba1ff4e5d6be5c2281178a94e3"} Feb 03 07:12:55 crc kubenswrapper[4708]: I0203 07:12:55.223977 4708 generic.go:334] "Generic (PLEG): container finished" podID="70bd64d0-0ea0-4c56-9e7f-fc150343c834" containerID="10bcb44e4d98fe66ec41f33f1515f695f868787832ae331b590823d084d34855" exitCode=0 Feb 03 07:12:55 crc kubenswrapper[4708]: I0203 07:12:55.224020 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ww8kc" event={"ID":"70bd64d0-0ea0-4c56-9e7f-fc150343c834","Type":"ContainerDied","Data":"10bcb44e4d98fe66ec41f33f1515f695f868787832ae331b590823d084d34855"} Feb 03 07:12:55 crc kubenswrapper[4708]: I0203 07:12:55.224037 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ww8kc" event={"ID":"70bd64d0-0ea0-4c56-9e7f-fc150343c834","Type":"ContainerStarted","Data":"86cf98deef8ce6035410020574858cfbd2e1f1c6287e36e04b5125f2d9a07808"} Feb 03 07:12:55 crc kubenswrapper[4708]: I0203 07:12:55.227160 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" 
event={"ID":"193735a9-a305-4494-9ce5-60cc62ac2768","Type":"ContainerStarted","Data":"7564bcb1ad4d48e99109fae2af9d5a8642c1c020a4285d53e76964d49997d3a3"} Feb 03 07:12:55 crc kubenswrapper[4708]: I0203 07:12:55.228709 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kclhl"] Feb 03 07:12:55 crc kubenswrapper[4708]: I0203 07:12:55.232100 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-qftz5" Feb 03 07:12:55 crc kubenswrapper[4708]: I0203 07:12:55.237813 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-fvbq2" Feb 03 07:12:55 crc kubenswrapper[4708]: I0203 07:12:55.241495 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9b0634e-fdcd-47e4-aa53-2c972b7beb30-catalog-content\") pod \"redhat-operators-s52xw\" (UID: \"e9b0634e-fdcd-47e4-aa53-2c972b7beb30\") " pod="openshift-marketplace/redhat-operators-s52xw" Feb 03 07:12:55 crc kubenswrapper[4708]: I0203 07:12:55.241543 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wj5nv\" (UniqueName: \"kubernetes.io/projected/e9b0634e-fdcd-47e4-aa53-2c972b7beb30-kube-api-access-wj5nv\") pod \"redhat-operators-s52xw\" (UID: \"e9b0634e-fdcd-47e4-aa53-2c972b7beb30\") " pod="openshift-marketplace/redhat-operators-s52xw" Feb 03 07:12:55 crc kubenswrapper[4708]: I0203 07:12:55.241587 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9b0634e-fdcd-47e4-aa53-2c972b7beb30-utilities\") pod \"redhat-operators-s52xw\" (UID: \"e9b0634e-fdcd-47e4-aa53-2c972b7beb30\") " pod="openshift-marketplace/redhat-operators-s52xw" Feb 03 07:12:55 crc kubenswrapper[4708]: I0203 07:12:55.342457 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wj5nv\" (UniqueName: \"kubernetes.io/projected/e9b0634e-fdcd-47e4-aa53-2c972b7beb30-kube-api-access-wj5nv\") pod \"redhat-operators-s52xw\" (UID: \"e9b0634e-fdcd-47e4-aa53-2c972b7beb30\") " pod="openshift-marketplace/redhat-operators-s52xw" Feb 03 07:12:55 crc kubenswrapper[4708]: I0203 07:12:55.342557 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9b0634e-fdcd-47e4-aa53-2c972b7beb30-utilities\") pod \"redhat-operators-s52xw\" (UID: \"e9b0634e-fdcd-47e4-aa53-2c972b7beb30\") " pod="openshift-marketplace/redhat-operators-s52xw" Feb 03 07:12:55 crc kubenswrapper[4708]: I0203 07:12:55.342765 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9b0634e-fdcd-47e4-aa53-2c972b7beb30-catalog-content\") pod \"redhat-operators-s52xw\" (UID: \"e9b0634e-fdcd-47e4-aa53-2c972b7beb30\") " pod="openshift-marketplace/redhat-operators-s52xw" Feb 03 07:12:55 crc kubenswrapper[4708]: I0203 07:12:55.344565 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9b0634e-fdcd-47e4-aa53-2c972b7beb30-utilities\") pod \"redhat-operators-s52xw\" (UID: \"e9b0634e-fdcd-47e4-aa53-2c972b7beb30\") " pod="openshift-marketplace/redhat-operators-s52xw" Feb 03 07:12:55 crc kubenswrapper[4708]: I0203 07:12:55.345602 4708 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9b0634e-fdcd-47e4-aa53-2c972b7beb30-catalog-content\") pod \"redhat-operators-s52xw\" (UID: \"e9b0634e-fdcd-47e4-aa53-2c972b7beb30\") " pod="openshift-marketplace/redhat-operators-s52xw" Feb 03 07:12:55 crc kubenswrapper[4708]: I0203 07:12:55.372183 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wj5nv\" (UniqueName: \"kubernetes.io/projected/e9b0634e-fdcd-47e4-aa53-2c972b7beb30-kube-api-access-wj5nv\") pod \"redhat-operators-s52xw\" (UID: \"e9b0634e-fdcd-47e4-aa53-2c972b7beb30\") " pod="openshift-marketplace/redhat-operators-s52xw" Feb 03 07:12:55 crc kubenswrapper[4708]: I0203 07:12:55.436060 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s52xw" Feb 03 07:12:55 crc kubenswrapper[4708]: I0203 07:12:55.501449 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-t2lvz" Feb 03 07:12:55 crc kubenswrapper[4708]: I0203 07:12:55.851697 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-s52xw"] Feb 03 07:12:55 crc kubenswrapper[4708]: W0203 07:12:55.883437 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode9b0634e_fdcd_47e4_aa53_2c972b7beb30.slice/crio-d3a120b26a73a73cfa43675b96f1a9e23cb389434c541c96ebd59dfa56338a6f WatchSource:0}: Error finding container d3a120b26a73a73cfa43675b96f1a9e23cb389434c541c96ebd59dfa56338a6f: Status 404 returned error can't find the container with id d3a120b26a73a73cfa43675b96f1a9e23cb389434c541c96ebd59dfa56338a6f Feb 03 07:12:56 crc kubenswrapper[4708]: I0203 07:12:56.101969 4708 patch_prober.go:28] interesting pod/router-default-5444994796-zrxh2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 03 07:12:56 crc kubenswrapper[4708]: [-]has-synced failed: reason withheld Feb 03 07:12:56 crc kubenswrapper[4708]: [+]process-running ok Feb 03 07:12:56 crc kubenswrapper[4708]: healthz check failed Feb 03 07:12:56 crc kubenswrapper[4708]: I0203 07:12:56.102229 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zrxh2" podUID="f8f529d7-932a-4047-b603-f84e03fe6898" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 03 07:12:56 crc kubenswrapper[4708]: I0203 07:12:56.239976 4708 generic.go:334] "Generic (PLEG): container finished" podID="42939d12-477f-4186-9d74-1b62ca36d039" containerID="8e538a3b7ef066e48e6ed9c236249888f5a60ebffe1f9a8ed9d4f9914628782f" exitCode=0 Feb 03 07:12:56 crc kubenswrapper[4708]: I0203 07:12:56.240030 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kclhl" event={"ID":"42939d12-477f-4186-9d74-1b62ca36d039","Type":"ContainerDied","Data":"8e538a3b7ef066e48e6ed9c236249888f5a60ebffe1f9a8ed9d4f9914628782f"} Feb 03 07:12:56 crc kubenswrapper[4708]: I0203 07:12:56.240056 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kclhl" event={"ID":"42939d12-477f-4186-9d74-1b62ca36d039","Type":"ContainerStarted","Data":"b28ceea3b13fff703a6eee3171f861360da60f27e9cc4da71fd68fa1cdf7434f"} Feb 03 07:12:56 
crc kubenswrapper[4708]: I0203 07:12:56.243428 4708 generic.go:334] "Generic (PLEG): container finished" podID="e9b0634e-fdcd-47e4-aa53-2c972b7beb30" containerID="baba7e3ce0d3f39ec9787ee56b0e825cbfa64be06d4afeb50bbe3dcfedb8498c" exitCode=0 Feb 03 07:12:56 crc kubenswrapper[4708]: I0203 07:12:56.243481 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s52xw" event={"ID":"e9b0634e-fdcd-47e4-aa53-2c972b7beb30","Type":"ContainerDied","Data":"baba7e3ce0d3f39ec9787ee56b0e825cbfa64be06d4afeb50bbe3dcfedb8498c"} Feb 03 07:12:56 crc kubenswrapper[4708]: I0203 07:12:56.243504 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s52xw" event={"ID":"e9b0634e-fdcd-47e4-aa53-2c972b7beb30","Type":"ContainerStarted","Data":"d3a120b26a73a73cfa43675b96f1a9e23cb389434c541c96ebd59dfa56338a6f"} Feb 03 07:12:56 crc kubenswrapper[4708]: I0203 07:12:56.247100 4708 generic.go:334] "Generic (PLEG): container finished" podID="193735a9-a305-4494-9ce5-60cc62ac2768" containerID="95ec23794160419ca490e60086c4e81e32ae9b97b4149983c1ddd5c960cd6223" exitCode=0 Feb 03 07:12:56 crc kubenswrapper[4708]: I0203 07:12:56.248853 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"193735a9-a305-4494-9ce5-60cc62ac2768","Type":"ContainerDied","Data":"95ec23794160419ca490e60086c4e81e32ae9b97b4149983c1ddd5c960cd6223"} Feb 03 07:12:57 crc kubenswrapper[4708]: I0203 07:12:57.113214 4708 patch_prober.go:28] interesting pod/router-default-5444994796-zrxh2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 03 07:12:57 crc kubenswrapper[4708]: [-]has-synced failed: reason withheld Feb 03 07:12:57 crc kubenswrapper[4708]: [+]process-running ok Feb 03 07:12:57 crc kubenswrapper[4708]: healthz check failed Feb 03 07:12:57 crc kubenswrapper[4708]: I0203 07:12:57.113266 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zrxh2" podUID="f8f529d7-932a-4047-b603-f84e03fe6898" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 03 07:12:57 crc kubenswrapper[4708]: I0203 07:12:57.774859 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 03 07:12:57 crc kubenswrapper[4708]: I0203 07:12:57.886673 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/193735a9-a305-4494-9ce5-60cc62ac2768-kube-api-access\") pod \"193735a9-a305-4494-9ce5-60cc62ac2768\" (UID: \"193735a9-a305-4494-9ce5-60cc62ac2768\") " Feb 03 07:12:57 crc kubenswrapper[4708]: I0203 07:12:57.886776 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/193735a9-a305-4494-9ce5-60cc62ac2768-kubelet-dir\") pod \"193735a9-a305-4494-9ce5-60cc62ac2768\" (UID: \"193735a9-a305-4494-9ce5-60cc62ac2768\") " Feb 03 07:12:57 crc kubenswrapper[4708]: I0203 07:12:57.887095 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/193735a9-a305-4494-9ce5-60cc62ac2768-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "193735a9-a305-4494-9ce5-60cc62ac2768" (UID: "193735a9-a305-4494-9ce5-60cc62ac2768"). 
InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:12:57 crc kubenswrapper[4708]: I0203 07:12:57.944581 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/193735a9-a305-4494-9ce5-60cc62ac2768-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "193735a9-a305-4494-9ce5-60cc62ac2768" (UID: "193735a9-a305-4494-9ce5-60cc62ac2768"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:12:57 crc kubenswrapper[4708]: I0203 07:12:57.988784 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/193735a9-a305-4494-9ce5-60cc62ac2768-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 03 07:12:57 crc kubenswrapper[4708]: I0203 07:12:57.988833 4708 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/193735a9-a305-4494-9ce5-60cc62ac2768-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 03 07:12:58 crc kubenswrapper[4708]: I0203 07:12:58.109755 4708 patch_prober.go:28] interesting pod/router-default-5444994796-zrxh2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 03 07:12:58 crc kubenswrapper[4708]: [-]has-synced failed: reason withheld Feb 03 07:12:58 crc kubenswrapper[4708]: [+]process-running ok Feb 03 07:12:58 crc kubenswrapper[4708]: healthz check failed Feb 03 07:12:58 crc kubenswrapper[4708]: I0203 07:12:58.109834 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zrxh2" podUID="f8f529d7-932a-4047-b603-f84e03fe6898" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 03 07:12:58 crc kubenswrapper[4708]: I0203 07:12:58.330000 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"193735a9-a305-4494-9ce5-60cc62ac2768","Type":"ContainerDied","Data":"7564bcb1ad4d48e99109fae2af9d5a8642c1c020a4285d53e76964d49997d3a3"} Feb 03 07:12:58 crc kubenswrapper[4708]: I0203 07:12:58.330041 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7564bcb1ad4d48e99109fae2af9d5a8642c1c020a4285d53e76964d49997d3a3" Feb 03 07:12:58 crc kubenswrapper[4708]: I0203 07:12:58.330102 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 03 07:12:59 crc kubenswrapper[4708]: I0203 07:12:59.107221 4708 patch_prober.go:28] interesting pod/router-default-5444994796-zrxh2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 03 07:12:59 crc kubenswrapper[4708]: [-]has-synced failed: reason withheld Feb 03 07:12:59 crc kubenswrapper[4708]: [+]process-running ok Feb 03 07:12:59 crc kubenswrapper[4708]: healthz check failed Feb 03 07:12:59 crc kubenswrapper[4708]: I0203 07:12:59.107568 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zrxh2" podUID="f8f529d7-932a-4047-b603-f84e03fe6898" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 03 07:12:59 crc kubenswrapper[4708]: I0203 07:12:59.150897 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Feb 03 07:12:59 crc kubenswrapper[4708]: E0203 07:12:59.151095 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="193735a9-a305-4494-9ce5-60cc62ac2768" containerName="pruner" Feb 03 07:12:59 crc kubenswrapper[4708]: I0203 07:12:59.151106 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="193735a9-a305-4494-9ce5-60cc62ac2768" containerName="pruner" Feb 03 07:12:59 crc kubenswrapper[4708]: I0203 07:12:59.151274 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="193735a9-a305-4494-9ce5-60cc62ac2768" containerName="pruner" Feb 03 07:12:59 crc kubenswrapper[4708]: I0203 07:12:59.152021 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 03 07:12:59 crc kubenswrapper[4708]: I0203 07:12:59.155769 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Feb 03 07:12:59 crc kubenswrapper[4708]: I0203 07:12:59.163612 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Feb 03 07:12:59 crc kubenswrapper[4708]: I0203 07:12:59.163747 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Feb 03 07:12:59 crc kubenswrapper[4708]: I0203 07:12:59.228446 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/74961b6c-2198-48f9-b4f4-21ee144ddcad-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"74961b6c-2198-48f9-b4f4-21ee144ddcad\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 03 07:12:59 crc kubenswrapper[4708]: I0203 07:12:59.228515 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/74961b6c-2198-48f9-b4f4-21ee144ddcad-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"74961b6c-2198-48f9-b4f4-21ee144ddcad\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 03 07:12:59 crc kubenswrapper[4708]: I0203 07:12:59.329948 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/74961b6c-2198-48f9-b4f4-21ee144ddcad-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"74961b6c-2198-48f9-b4f4-21ee144ddcad\") " 
pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 03 07:12:59 crc kubenswrapper[4708]: I0203 07:12:59.330039 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/74961b6c-2198-48f9-b4f4-21ee144ddcad-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"74961b6c-2198-48f9-b4f4-21ee144ddcad\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 03 07:12:59 crc kubenswrapper[4708]: I0203 07:12:59.330756 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/74961b6c-2198-48f9-b4f4-21ee144ddcad-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"74961b6c-2198-48f9-b4f4-21ee144ddcad\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 03 07:12:59 crc kubenswrapper[4708]: I0203 07:12:59.344976 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-cluster-samples-operator_cluster-samples-operator-665b6dd947-mwrf4_3a0d22e9-2f9a-4a91-85b8-7ad55bff4f46/cluster-samples-operator/0.log" Feb 03 07:12:59 crc kubenswrapper[4708]: I0203 07:12:59.345037 4708 generic.go:334] "Generic (PLEG): container finished" podID="3a0d22e9-2f9a-4a91-85b8-7ad55bff4f46" containerID="14997e0105e641fda339800aa04be25cb5d4b9e8c5a3ba1607c8e43870b43f6a" exitCode=2 Feb 03 07:12:59 crc kubenswrapper[4708]: I0203 07:12:59.345071 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mwrf4" event={"ID":"3a0d22e9-2f9a-4a91-85b8-7ad55bff4f46","Type":"ContainerDied","Data":"14997e0105e641fda339800aa04be25cb5d4b9e8c5a3ba1607c8e43870b43f6a"} Feb 03 07:12:59 crc kubenswrapper[4708]: I0203 07:12:59.345619 4708 scope.go:117] "RemoveContainer" containerID="14997e0105e641fda339800aa04be25cb5d4b9e8c5a3ba1607c8e43870b43f6a" Feb 03 07:12:59 crc kubenswrapper[4708]: I0203 07:12:59.356833 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/74961b6c-2198-48f9-b4f4-21ee144ddcad-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"74961b6c-2198-48f9-b4f4-21ee144ddcad\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 03 07:12:59 crc kubenswrapper[4708]: I0203 07:12:59.484339 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 03 07:13:00 crc kubenswrapper[4708]: I0203 07:13:00.075673 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Feb 03 07:13:00 crc kubenswrapper[4708]: I0203 07:13:00.102465 4708 patch_prober.go:28] interesting pod/router-default-5444994796-zrxh2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 03 07:13:00 crc kubenswrapper[4708]: [-]has-synced failed: reason withheld Feb 03 07:13:00 crc kubenswrapper[4708]: [+]process-running ok Feb 03 07:13:00 crc kubenswrapper[4708]: healthz check failed Feb 03 07:13:00 crc kubenswrapper[4708]: I0203 07:13:00.102550 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zrxh2" podUID="f8f529d7-932a-4047-b603-f84e03fe6898" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 03 07:13:00 crc kubenswrapper[4708]: W0203 07:13:00.149977 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod74961b6c_2198_48f9_b4f4_21ee144ddcad.slice/crio-55445423dbd576cfce43b78ee802cceafe5674175ce027bc427839960ca9690a WatchSource:0}: Error finding container 55445423dbd576cfce43b78ee802cceafe5674175ce027bc427839960ca9690a: Status 404 returned error can't find the container with id 55445423dbd576cfce43b78ee802cceafe5674175ce027bc427839960ca9690a Feb 03 07:13:00 crc kubenswrapper[4708]: I0203 07:13:00.407123 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-cluster-samples-operator_cluster-samples-operator-665b6dd947-mwrf4_3a0d22e9-2f9a-4a91-85b8-7ad55bff4f46/cluster-samples-operator/0.log" Feb 03 07:13:00 crc kubenswrapper[4708]: I0203 07:13:00.407499 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mwrf4" event={"ID":"3a0d22e9-2f9a-4a91-85b8-7ad55bff4f46","Type":"ContainerStarted","Data":"5be3be0f6efd3f1e671ae279b9cd9272d71ef682c39c8bf1c0c8e399e5959825"} Feb 03 07:13:00 crc kubenswrapper[4708]: I0203 07:13:00.411087 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"74961b6c-2198-48f9-b4f4-21ee144ddcad","Type":"ContainerStarted","Data":"55445423dbd576cfce43b78ee802cceafe5674175ce027bc427839960ca9690a"} Feb 03 07:13:00 crc kubenswrapper[4708]: I0203 07:13:00.532733 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-htktd" Feb 03 07:13:01 crc kubenswrapper[4708]: I0203 07:13:01.102248 4708 patch_prober.go:28] interesting pod/router-default-5444994796-zrxh2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 03 07:13:01 crc kubenswrapper[4708]: [-]has-synced failed: reason withheld Feb 03 07:13:01 crc kubenswrapper[4708]: [+]process-running ok Feb 03 07:13:01 crc kubenswrapper[4708]: healthz check failed Feb 03 07:13:01 crc kubenswrapper[4708]: I0203 07:13:01.102312 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zrxh2" podUID="f8f529d7-932a-4047-b603-f84e03fe6898" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 03 07:13:02 
crc kubenswrapper[4708]: I0203 07:13:02.103286 4708 patch_prober.go:28] interesting pod/router-default-5444994796-zrxh2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 03 07:13:02 crc kubenswrapper[4708]: [-]has-synced failed: reason withheld Feb 03 07:13:02 crc kubenswrapper[4708]: [+]process-running ok Feb 03 07:13:02 crc kubenswrapper[4708]: healthz check failed Feb 03 07:13:02 crc kubenswrapper[4708]: I0203 07:13:02.103577 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zrxh2" podUID="f8f529d7-932a-4047-b603-f84e03fe6898" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 03 07:13:02 crc kubenswrapper[4708]: I0203 07:13:02.439209 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"74961b6c-2198-48f9-b4f4-21ee144ddcad","Type":"ContainerStarted","Data":"00d5326aa2a4d0ba6a9c24f0827b97a4095cb51bd38a0b58399e8e18f8163bd4"} Feb 03 07:13:02 crc kubenswrapper[4708]: I0203 07:13:02.455626 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=3.455605465 podStartE2EDuration="3.455605465s" podCreationTimestamp="2026-02-03 07:12:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:13:02.451608754 +0000 UTC m=+161.433555581" watchObservedRunningTime="2026-02-03 07:13:02.455605465 +0000 UTC m=+161.437552292" Feb 03 07:13:03 crc kubenswrapper[4708]: I0203 07:13:03.100835 4708 patch_prober.go:28] interesting pod/router-default-5444994796-zrxh2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 03 07:13:03 crc kubenswrapper[4708]: [-]has-synced failed: reason withheld Feb 03 07:13:03 crc kubenswrapper[4708]: [+]process-running ok Feb 03 07:13:03 crc kubenswrapper[4708]: healthz check failed Feb 03 07:13:03 crc kubenswrapper[4708]: I0203 07:13:03.101103 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zrxh2" podUID="f8f529d7-932a-4047-b603-f84e03fe6898" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 03 07:13:04 crc kubenswrapper[4708]: I0203 07:13:04.100845 4708 patch_prober.go:28] interesting pod/router-default-5444994796-zrxh2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 03 07:13:04 crc kubenswrapper[4708]: [-]has-synced failed: reason withheld Feb 03 07:13:04 crc kubenswrapper[4708]: [+]process-running ok Feb 03 07:13:04 crc kubenswrapper[4708]: healthz check failed Feb 03 07:13:04 crc kubenswrapper[4708]: I0203 07:13:04.101128 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zrxh2" podUID="f8f529d7-932a-4047-b603-f84e03fe6898" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 03 07:13:04 crc kubenswrapper[4708]: I0203 07:13:04.427147 4708 patch_prober.go:28] interesting pod/console-f9d7485db-zd8kn container/console namespace/openshift-console: 
Startup probe status=failure output="Get \"https://10.217.0.25:8443/health\": dial tcp 10.217.0.25:8443: connect: connection refused" start-of-body= Feb 03 07:13:04 crc kubenswrapper[4708]: I0203 07:13:04.427200 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-zd8kn" podUID="46ee42c1-592d-47c3-85ba-ead60edf7aca" containerName="console" probeResult="failure" output="Get \"https://10.217.0.25:8443/health\": dial tcp 10.217.0.25:8443: connect: connection refused" Feb 03 07:13:04 crc kubenswrapper[4708]: I0203 07:13:04.456535 4708 generic.go:334] "Generic (PLEG): container finished" podID="74961b6c-2198-48f9-b4f4-21ee144ddcad" containerID="00d5326aa2a4d0ba6a9c24f0827b97a4095cb51bd38a0b58399e8e18f8163bd4" exitCode=0 Feb 03 07:13:04 crc kubenswrapper[4708]: I0203 07:13:04.456571 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"74961b6c-2198-48f9-b4f4-21ee144ddcad","Type":"ContainerDied","Data":"00d5326aa2a4d0ba6a9c24f0827b97a4095cb51bd38a0b58399e8e18f8163bd4"} Feb 03 07:13:04 crc kubenswrapper[4708]: I0203 07:13:04.746288 4708 patch_prober.go:28] interesting pod/downloads-7954f5f757-c2mlc container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.22:8080/\": dial tcp 10.217.0.22:8080: connect: connection refused" start-of-body= Feb 03 07:13:04 crc kubenswrapper[4708]: I0203 07:13:04.746338 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-c2mlc" podUID="7ee20271-e2ce-4476-a011-5e00e19126bf" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.22:8080/\": dial tcp 10.217.0.22:8080: connect: connection refused" Feb 03 07:13:04 crc kubenswrapper[4708]: I0203 07:13:04.746370 4708 patch_prober.go:28] interesting pod/downloads-7954f5f757-c2mlc container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.22:8080/\": dial tcp 10.217.0.22:8080: connect: connection refused" start-of-body= Feb 03 07:13:04 crc kubenswrapper[4708]: I0203 07:13:04.746419 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-c2mlc" podUID="7ee20271-e2ce-4476-a011-5e00e19126bf" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.22:8080/\": dial tcp 10.217.0.22:8080: connect: connection refused" Feb 03 07:13:05 crc kubenswrapper[4708]: I0203 07:13:05.135360 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-zrxh2" Feb 03 07:13:05 crc kubenswrapper[4708]: I0203 07:13:05.138825 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-zrxh2" Feb 03 07:13:07 crc kubenswrapper[4708]: I0203 07:13:07.258762 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/851add34-7566-4ed5-b70a-c7935eb26e4f-metrics-certs\") pod \"network-metrics-daemon-6thl9\" (UID: \"851add34-7566-4ed5-b70a-c7935eb26e4f\") " pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:13:07 crc kubenswrapper[4708]: I0203 07:13:07.271827 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/851add34-7566-4ed5-b70a-c7935eb26e4f-metrics-certs\") pod \"network-metrics-daemon-6thl9\" (UID: 
\"851add34-7566-4ed5-b70a-c7935eb26e4f\") " pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:13:07 crc kubenswrapper[4708]: I0203 07:13:07.519404 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6thl9" Feb 03 07:13:12 crc kubenswrapper[4708]: I0203 07:13:12.568536 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:13:14 crc kubenswrapper[4708]: I0203 07:13:14.431346 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-zd8kn" Feb 03 07:13:14 crc kubenswrapper[4708]: I0203 07:13:14.435812 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-zd8kn" Feb 03 07:13:14 crc kubenswrapper[4708]: I0203 07:13:14.729754 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-c2mlc" Feb 03 07:13:19 crc kubenswrapper[4708]: I0203 07:13:19.351397 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 03 07:13:19 crc kubenswrapper[4708]: I0203 07:13:19.430200 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/74961b6c-2198-48f9-b4f4-21ee144ddcad-kubelet-dir\") pod \"74961b6c-2198-48f9-b4f4-21ee144ddcad\" (UID: \"74961b6c-2198-48f9-b4f4-21ee144ddcad\") " Feb 03 07:13:19 crc kubenswrapper[4708]: I0203 07:13:19.430285 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/74961b6c-2198-48f9-b4f4-21ee144ddcad-kube-api-access\") pod \"74961b6c-2198-48f9-b4f4-21ee144ddcad\" (UID: \"74961b6c-2198-48f9-b4f4-21ee144ddcad\") " Feb 03 07:13:19 crc kubenswrapper[4708]: I0203 07:13:19.430344 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/74961b6c-2198-48f9-b4f4-21ee144ddcad-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "74961b6c-2198-48f9-b4f4-21ee144ddcad" (UID: "74961b6c-2198-48f9-b4f4-21ee144ddcad"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:13:19 crc kubenswrapper[4708]: I0203 07:13:19.430847 4708 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/74961b6c-2198-48f9-b4f4-21ee144ddcad-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 03 07:13:19 crc kubenswrapper[4708]: I0203 07:13:19.435140 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74961b6c-2198-48f9-b4f4-21ee144ddcad-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "74961b6c-2198-48f9-b4f4-21ee144ddcad" (UID: "74961b6c-2198-48f9-b4f4-21ee144ddcad"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:13:19 crc kubenswrapper[4708]: I0203 07:13:19.532920 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/74961b6c-2198-48f9-b4f4-21ee144ddcad-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 03 07:13:19 crc kubenswrapper[4708]: I0203 07:13:19.555761 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"74961b6c-2198-48f9-b4f4-21ee144ddcad","Type":"ContainerDied","Data":"55445423dbd576cfce43b78ee802cceafe5674175ce027bc427839960ca9690a"} Feb 03 07:13:19 crc kubenswrapper[4708]: I0203 07:13:19.556073 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="55445423dbd576cfce43b78ee802cceafe5674175ce027bc427839960ca9690a" Feb 03 07:13:19 crc kubenswrapper[4708]: I0203 07:13:19.555902 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 03 07:13:22 crc kubenswrapper[4708]: I0203 07:13:22.059684 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 03 07:13:23 crc kubenswrapper[4708]: I0203 07:13:23.832850 4708 patch_prober.go:28] interesting pod/machine-config-daemon-r94bn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:13:23 crc kubenswrapper[4708]: I0203 07:13:23.833375 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:13:25 crc kubenswrapper[4708]: I0203 07:13:25.481943 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dwg5b" Feb 03 07:13:27 crc kubenswrapper[4708]: E0203 07:13:27.760969 4708 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Feb 03 07:13:27 crc kubenswrapper[4708]: E0203 07:13:27.761992 4708 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mxj5g,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-7d8rt_openshift-marketplace(22892077-113b-4859-81cb-9ec0e6fc60ea): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 03 07:13:27 crc kubenswrapper[4708]: E0203 07:13:27.763598 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-7d8rt" podUID="22892077-113b-4859-81cb-9ec0e6fc60ea" Feb 03 07:13:29 crc kubenswrapper[4708]: E0203 07:13:29.347548 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-7d8rt" podUID="22892077-113b-4859-81cb-9ec0e6fc60ea" Feb 03 07:13:29 crc kubenswrapper[4708]: E0203 07:13:29.422961 4708 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Feb 03 07:13:29 crc kubenswrapper[4708]: E0203 07:13:29.423424 4708 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nv5lx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-gtjj2_openshift-marketplace(267fbe93-1af6-4a87-9720-c9d5cae93c91): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 03 07:13:29 crc kubenswrapper[4708]: E0203 07:13:29.425288 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-gtjj2" podUID="267fbe93-1af6-4a87-9720-c9d5cae93c91" Feb 03 07:13:31 crc kubenswrapper[4708]: I0203 07:13:31.520129 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lfvrw"] Feb 03 07:13:33 crc kubenswrapper[4708]: E0203 07:13:33.330511 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-gtjj2" podUID="267fbe93-1af6-4a87-9720-c9d5cae93c91" Feb 03 07:13:33 crc kubenswrapper[4708]: E0203 07:13:33.431269 4708 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Feb 03 07:13:33 crc kubenswrapper[4708]: E0203 07:13:33.431533 4708 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5vzh4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-ggnln_openshift-marketplace(ebc6ef57-be3f-448d-9acc-45a042d16383): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 03 07:13:33 crc kubenswrapper[4708]: E0203 07:13:33.433239 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-ggnln" podUID="ebc6ef57-be3f-448d-9acc-45a042d16383" Feb 03 07:13:33 crc kubenswrapper[4708]: E0203 07:13:33.443097 4708 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Feb 03 07:13:33 crc kubenswrapper[4708]: E0203 07:13:33.443223 4708 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-669sk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-pxgjb_openshift-marketplace(d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 03 07:13:33 crc kubenswrapper[4708]: E0203 07:13:33.444510 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-pxgjb" podUID="d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1" Feb 03 07:13:33 crc kubenswrapper[4708]: E0203 07:13:33.458909 4708 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Feb 03 07:13:33 crc kubenswrapper[4708]: E0203 07:13:33.459061 4708 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wj5nv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-s52xw_openshift-marketplace(e9b0634e-fdcd-47e4-aa53-2c972b7beb30): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 03 07:13:33 crc kubenswrapper[4708]: E0203 07:13:33.460682 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-s52xw" podUID="e9b0634e-fdcd-47e4-aa53-2c972b7beb30" Feb 03 07:13:33 crc kubenswrapper[4708]: E0203 07:13:33.481099 4708 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Feb 03 07:13:33 crc kubenswrapper[4708]: E0203 07:13:33.481266 4708 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tkbmm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-kclhl_openshift-marketplace(42939d12-477f-4186-9d74-1b62ca36d039): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 03 07:13:33 crc kubenswrapper[4708]: E0203 07:13:33.482474 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-kclhl" podUID="42939d12-477f-4186-9d74-1b62ca36d039" Feb 03 07:13:33 crc kubenswrapper[4708]: E0203 07:13:33.497011 4708 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Feb 03 07:13:33 crc kubenswrapper[4708]: E0203 07:13:33.497174 4708 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8fh9s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-5ldx7_openshift-marketplace(def51730-4952-42ef-9bc1-b04ed753075c): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 03 07:13:33 crc kubenswrapper[4708]: E0203 07:13:33.498537 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-5ldx7" podUID="def51730-4952-42ef-9bc1-b04ed753075c" Feb 03 07:13:33 crc kubenswrapper[4708]: I0203 07:13:33.634959 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ww8kc" event={"ID":"70bd64d0-0ea0-4c56-9e7f-fc150343c834","Type":"ContainerStarted","Data":"5130bd60935ad6a145f37c81b74e444add933f38d687416b3ec0b1f20531af85"} Feb 03 07:13:33 crc kubenswrapper[4708]: E0203 07:13:33.636286 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-s52xw" podUID="e9b0634e-fdcd-47e4-aa53-2c972b7beb30" Feb 03 07:13:33 crc kubenswrapper[4708]: E0203 07:13:33.637001 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-5ldx7" podUID="def51730-4952-42ef-9bc1-b04ed753075c" Feb 03 07:13:33 crc kubenswrapper[4708]: E0203 07:13:33.637062 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-kclhl" podUID="42939d12-477f-4186-9d74-1b62ca36d039" Feb 03 07:13:33 crc kubenswrapper[4708]: E0203 07:13:33.638985 4708 pod_workers.go:1301] 
"Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-ggnln" podUID="ebc6ef57-be3f-448d-9acc-45a042d16383" Feb 03 07:13:33 crc kubenswrapper[4708]: E0203 07:13:33.639175 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-pxgjb" podUID="d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1" Feb 03 07:13:33 crc kubenswrapper[4708]: I0203 07:13:33.739477 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-6thl9"] Feb 03 07:13:34 crc kubenswrapper[4708]: I0203 07:13:34.643560 4708 generic.go:334] "Generic (PLEG): container finished" podID="70bd64d0-0ea0-4c56-9e7f-fc150343c834" containerID="5130bd60935ad6a145f37c81b74e444add933f38d687416b3ec0b1f20531af85" exitCode=0 Feb 03 07:13:34 crc kubenswrapper[4708]: I0203 07:13:34.643643 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ww8kc" event={"ID":"70bd64d0-0ea0-4c56-9e7f-fc150343c834","Type":"ContainerDied","Data":"5130bd60935ad6a145f37c81b74e444add933f38d687416b3ec0b1f20531af85"} Feb 03 07:13:34 crc kubenswrapper[4708]: I0203 07:13:34.646123 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-6thl9" event={"ID":"851add34-7566-4ed5-b70a-c7935eb26e4f","Type":"ContainerStarted","Data":"01fb64e22160362bc068e980cdc608b9d0a693543c86428717bf6e541f2a387b"} Feb 03 07:13:34 crc kubenswrapper[4708]: I0203 07:13:34.646153 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-6thl9" event={"ID":"851add34-7566-4ed5-b70a-c7935eb26e4f","Type":"ContainerStarted","Data":"3984cc004486e4ae5ef839f8d33b5e743f4815ff5b09a39d967515297391a4f6"} Feb 03 07:13:34 crc kubenswrapper[4708]: I0203 07:13:34.646183 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-6thl9" event={"ID":"851add34-7566-4ed5-b70a-c7935eb26e4f","Type":"ContainerStarted","Data":"27a1fd0902bda8a801161e8f507cc8ea13a33cb72a27ed87e1113337b7ccb072"} Feb 03 07:13:34 crc kubenswrapper[4708]: I0203 07:13:34.742245 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-6thl9" podStartSLOduration=170.742225981 podStartE2EDuration="2m50.742225981s" podCreationTimestamp="2026-02-03 07:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:13:34.682420949 +0000 UTC m=+193.664367756" watchObservedRunningTime="2026-02-03 07:13:34.742225981 +0000 UTC m=+193.724172788" Feb 03 07:13:34 crc kubenswrapper[4708]: I0203 07:13:34.748044 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Feb 03 07:13:34 crc kubenswrapper[4708]: E0203 07:13:34.748300 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74961b6c-2198-48f9-b4f4-21ee144ddcad" containerName="pruner" Feb 03 07:13:34 crc kubenswrapper[4708]: I0203 07:13:34.748316 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="74961b6c-2198-48f9-b4f4-21ee144ddcad" containerName="pruner" Feb 03 07:13:34 
crc kubenswrapper[4708]: I0203 07:13:34.748431 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="74961b6c-2198-48f9-b4f4-21ee144ddcad" containerName="pruner" Feb 03 07:13:34 crc kubenswrapper[4708]: I0203 07:13:34.748856 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 03 07:13:34 crc kubenswrapper[4708]: I0203 07:13:34.753641 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Feb 03 07:13:34 crc kubenswrapper[4708]: I0203 07:13:34.753980 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Feb 03 07:13:34 crc kubenswrapper[4708]: I0203 07:13:34.765273 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Feb 03 07:13:34 crc kubenswrapper[4708]: I0203 07:13:34.836687 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d45905ba-6f20-4401-a729-2dd916e9b4de-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"d45905ba-6f20-4401-a729-2dd916e9b4de\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 03 07:13:34 crc kubenswrapper[4708]: I0203 07:13:34.836755 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d45905ba-6f20-4401-a729-2dd916e9b4de-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"d45905ba-6f20-4401-a729-2dd916e9b4de\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 03 07:13:34 crc kubenswrapper[4708]: I0203 07:13:34.938476 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d45905ba-6f20-4401-a729-2dd916e9b4de-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"d45905ba-6f20-4401-a729-2dd916e9b4de\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 03 07:13:34 crc kubenswrapper[4708]: I0203 07:13:34.938548 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d45905ba-6f20-4401-a729-2dd916e9b4de-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"d45905ba-6f20-4401-a729-2dd916e9b4de\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 03 07:13:34 crc kubenswrapper[4708]: I0203 07:13:34.938574 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d45905ba-6f20-4401-a729-2dd916e9b4de-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"d45905ba-6f20-4401-a729-2dd916e9b4de\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 03 07:13:34 crc kubenswrapper[4708]: I0203 07:13:34.958005 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d45905ba-6f20-4401-a729-2dd916e9b4de-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"d45905ba-6f20-4401-a729-2dd916e9b4de\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 03 07:13:35 crc kubenswrapper[4708]: I0203 07:13:35.077509 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 03 07:13:35 crc kubenswrapper[4708]: I0203 07:13:35.486957 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Feb 03 07:13:35 crc kubenswrapper[4708]: W0203 07:13:35.492514 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podd45905ba_6f20_4401_a729_2dd916e9b4de.slice/crio-b15948d325936c35ab0293bd696ffdd08f1d0f1f8baa5e75681f00f8b01ab9e8 WatchSource:0}: Error finding container b15948d325936c35ab0293bd696ffdd08f1d0f1f8baa5e75681f00f8b01ab9e8: Status 404 returned error can't find the container with id b15948d325936c35ab0293bd696ffdd08f1d0f1f8baa5e75681f00f8b01ab9e8 Feb 03 07:13:35 crc kubenswrapper[4708]: I0203 07:13:35.657003 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ww8kc" event={"ID":"70bd64d0-0ea0-4c56-9e7f-fc150343c834","Type":"ContainerStarted","Data":"eab8451c3757e973623fa044f89ba6b1b69604c6008177532b43d4b678f5f241"} Feb 03 07:13:35 crc kubenswrapper[4708]: I0203 07:13:35.659617 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"d45905ba-6f20-4401-a729-2dd916e9b4de","Type":"ContainerStarted","Data":"b15948d325936c35ab0293bd696ffdd08f1d0f1f8baa5e75681f00f8b01ab9e8"} Feb 03 07:13:35 crc kubenswrapper[4708]: I0203 07:13:35.681033 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ww8kc" podStartSLOduration=2.6091644499999997 podStartE2EDuration="42.681011026s" podCreationTimestamp="2026-02-03 07:12:53 +0000 UTC" firstStartedPulling="2026-02-03 07:12:55.225364122 +0000 UTC m=+154.207310929" lastFinishedPulling="2026-02-03 07:13:35.297210698 +0000 UTC m=+194.279157505" observedRunningTime="2026-02-03 07:13:35.676389461 +0000 UTC m=+194.658336268" watchObservedRunningTime="2026-02-03 07:13:35.681011026 +0000 UTC m=+194.662957833" Feb 03 07:13:36 crc kubenswrapper[4708]: I0203 07:13:36.671846 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"d45905ba-6f20-4401-a729-2dd916e9b4de","Type":"ContainerDied","Data":"a76d07386a0b144440cfac08e382aadadbd87e6cb99e0ce0d3571cfd9c73fa62"} Feb 03 07:13:36 crc kubenswrapper[4708]: I0203 07:13:36.671774 4708 generic.go:334] "Generic (PLEG): container finished" podID="d45905ba-6f20-4401-a729-2dd916e9b4de" containerID="a76d07386a0b144440cfac08e382aadadbd87e6cb99e0ce0d3571cfd9c73fa62" exitCode=0 Feb 03 07:13:37 crc kubenswrapper[4708]: I0203 07:13:37.902500 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 03 07:13:37 crc kubenswrapper[4708]: I0203 07:13:37.976456 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d45905ba-6f20-4401-a729-2dd916e9b4de-kube-api-access\") pod \"d45905ba-6f20-4401-a729-2dd916e9b4de\" (UID: \"d45905ba-6f20-4401-a729-2dd916e9b4de\") " Feb 03 07:13:37 crc kubenswrapper[4708]: I0203 07:13:37.976515 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d45905ba-6f20-4401-a729-2dd916e9b4de-kubelet-dir\") pod \"d45905ba-6f20-4401-a729-2dd916e9b4de\" (UID: \"d45905ba-6f20-4401-a729-2dd916e9b4de\") " Feb 03 07:13:37 crc kubenswrapper[4708]: I0203 07:13:37.976662 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d45905ba-6f20-4401-a729-2dd916e9b4de-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "d45905ba-6f20-4401-a729-2dd916e9b4de" (UID: "d45905ba-6f20-4401-a729-2dd916e9b4de"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:13:37 crc kubenswrapper[4708]: I0203 07:13:37.976811 4708 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d45905ba-6f20-4401-a729-2dd916e9b4de-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 03 07:13:37 crc kubenswrapper[4708]: I0203 07:13:37.982417 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d45905ba-6f20-4401-a729-2dd916e9b4de-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "d45905ba-6f20-4401-a729-2dd916e9b4de" (UID: "d45905ba-6f20-4401-a729-2dd916e9b4de"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:13:38 crc kubenswrapper[4708]: I0203 07:13:38.078047 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d45905ba-6f20-4401-a729-2dd916e9b4de-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 03 07:13:38 crc kubenswrapper[4708]: I0203 07:13:38.690402 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"d45905ba-6f20-4401-a729-2dd916e9b4de","Type":"ContainerDied","Data":"b15948d325936c35ab0293bd696ffdd08f1d0f1f8baa5e75681f00f8b01ab9e8"} Feb 03 07:13:38 crc kubenswrapper[4708]: I0203 07:13:38.690444 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b15948d325936c35ab0293bd696ffdd08f1d0f1f8baa5e75681f00f8b01ab9e8" Feb 03 07:13:38 crc kubenswrapper[4708]: I0203 07:13:38.690478 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 03 07:13:41 crc kubenswrapper[4708]: I0203 07:13:41.351510 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Feb 03 07:13:41 crc kubenswrapper[4708]: E0203 07:13:41.352090 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d45905ba-6f20-4401-a729-2dd916e9b4de" containerName="pruner" Feb 03 07:13:41 crc kubenswrapper[4708]: I0203 07:13:41.352109 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="d45905ba-6f20-4401-a729-2dd916e9b4de" containerName="pruner" Feb 03 07:13:41 crc kubenswrapper[4708]: I0203 07:13:41.352228 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="d45905ba-6f20-4401-a729-2dd916e9b4de" containerName="pruner" Feb 03 07:13:41 crc kubenswrapper[4708]: I0203 07:13:41.353330 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 03 07:13:41 crc kubenswrapper[4708]: I0203 07:13:41.357563 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Feb 03 07:13:41 crc kubenswrapper[4708]: I0203 07:13:41.357570 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Feb 03 07:13:41 crc kubenswrapper[4708]: I0203 07:13:41.362155 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Feb 03 07:13:41 crc kubenswrapper[4708]: I0203 07:13:41.418351 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/edb06079-66b6-4c35-8f10-8978a5692c2f-var-lock\") pod \"installer-9-crc\" (UID: \"edb06079-66b6-4c35-8f10-8978a5692c2f\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 03 07:13:41 crc kubenswrapper[4708]: I0203 07:13:41.418454 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/edb06079-66b6-4c35-8f10-8978a5692c2f-kube-api-access\") pod \"installer-9-crc\" (UID: \"edb06079-66b6-4c35-8f10-8978a5692c2f\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 03 07:13:41 crc kubenswrapper[4708]: I0203 07:13:41.418524 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/edb06079-66b6-4c35-8f10-8978a5692c2f-kubelet-dir\") pod \"installer-9-crc\" (UID: \"edb06079-66b6-4c35-8f10-8978a5692c2f\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 03 07:13:41 crc kubenswrapper[4708]: I0203 07:13:41.520247 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/edb06079-66b6-4c35-8f10-8978a5692c2f-kubelet-dir\") pod \"installer-9-crc\" (UID: \"edb06079-66b6-4c35-8f10-8978a5692c2f\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 03 07:13:41 crc kubenswrapper[4708]: I0203 07:13:41.520328 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/edb06079-66b6-4c35-8f10-8978a5692c2f-var-lock\") pod \"installer-9-crc\" (UID: \"edb06079-66b6-4c35-8f10-8978a5692c2f\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 03 07:13:41 crc kubenswrapper[4708]: I0203 07:13:41.520377 4708 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/edb06079-66b6-4c35-8f10-8978a5692c2f-kube-api-access\") pod \"installer-9-crc\" (UID: \"edb06079-66b6-4c35-8f10-8978a5692c2f\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 03 07:13:41 crc kubenswrapper[4708]: I0203 07:13:41.520773 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/edb06079-66b6-4c35-8f10-8978a5692c2f-kubelet-dir\") pod \"installer-9-crc\" (UID: \"edb06079-66b6-4c35-8f10-8978a5692c2f\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 03 07:13:41 crc kubenswrapper[4708]: I0203 07:13:41.520833 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/edb06079-66b6-4c35-8f10-8978a5692c2f-var-lock\") pod \"installer-9-crc\" (UID: \"edb06079-66b6-4c35-8f10-8978a5692c2f\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 03 07:13:41 crc kubenswrapper[4708]: I0203 07:13:41.543011 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/edb06079-66b6-4c35-8f10-8978a5692c2f-kube-api-access\") pod \"installer-9-crc\" (UID: \"edb06079-66b6-4c35-8f10-8978a5692c2f\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 03 07:13:41 crc kubenswrapper[4708]: I0203 07:13:41.677278 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 03 07:13:42 crc kubenswrapper[4708]: I0203 07:13:42.065829 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Feb 03 07:13:42 crc kubenswrapper[4708]: W0203 07:13:42.069357 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podedb06079_66b6_4c35_8f10_8978a5692c2f.slice/crio-1d28a51365eb37da4090c9fb3405bd3ca7095f056a99b2078d8701183ce2a861 WatchSource:0}: Error finding container 1d28a51365eb37da4090c9fb3405bd3ca7095f056a99b2078d8701183ce2a861: Status 404 returned error can't find the container with id 1d28a51365eb37da4090c9fb3405bd3ca7095f056a99b2078d8701183ce2a861 Feb 03 07:13:42 crc kubenswrapper[4708]: I0203 07:13:42.710474 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"edb06079-66b6-4c35-8f10-8978a5692c2f","Type":"ContainerStarted","Data":"1d28a51365eb37da4090c9fb3405bd3ca7095f056a99b2078d8701183ce2a861"} Feb 03 07:13:43 crc kubenswrapper[4708]: I0203 07:13:43.716598 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"edb06079-66b6-4c35-8f10-8978a5692c2f","Type":"ContainerStarted","Data":"d2a6519e725358f25407ef941b7a8a0f8f292362e9af78a1a531c1cd816057d6"} Feb 03 07:13:43 crc kubenswrapper[4708]: I0203 07:13:43.733903 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=2.733883966 podStartE2EDuration="2.733883966s" podCreationTimestamp="2026-02-03 07:13:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:13:43.731452372 +0000 UTC m=+202.713399189" watchObservedRunningTime="2026-02-03 07:13:43.733883966 +0000 UTC m=+202.715830773" Feb 03 07:13:43 crc kubenswrapper[4708]: I0203 07:13:43.995655 4708 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ww8kc" Feb 03 07:13:43 crc kubenswrapper[4708]: I0203 07:13:43.996100 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ww8kc" Feb 03 07:13:44 crc kubenswrapper[4708]: I0203 07:13:44.883032 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ww8kc" Feb 03 07:13:45 crc kubenswrapper[4708]: I0203 07:13:45.728340 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7d8rt" event={"ID":"22892077-113b-4859-81cb-9ec0e6fc60ea","Type":"ContainerStarted","Data":"cd7076c59aec17da8d46286d6239d88d01c8dafb71c26ae82b03b14b0ebbe6bc"} Feb 03 07:13:45 crc kubenswrapper[4708]: I0203 07:13:45.788367 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ww8kc" Feb 03 07:13:46 crc kubenswrapper[4708]: I0203 07:13:46.735920 4708 generic.go:334] "Generic (PLEG): container finished" podID="22892077-113b-4859-81cb-9ec0e6fc60ea" containerID="cd7076c59aec17da8d46286d6239d88d01c8dafb71c26ae82b03b14b0ebbe6bc" exitCode=0 Feb 03 07:13:46 crc kubenswrapper[4708]: I0203 07:13:46.735993 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7d8rt" event={"ID":"22892077-113b-4859-81cb-9ec0e6fc60ea","Type":"ContainerDied","Data":"cd7076c59aec17da8d46286d6239d88d01c8dafb71c26ae82b03b14b0ebbe6bc"} Feb 03 07:13:46 crc kubenswrapper[4708]: I0203 07:13:46.739587 4708 generic.go:334] "Generic (PLEG): container finished" podID="ebc6ef57-be3f-448d-9acc-45a042d16383" containerID="227779308a028715aa5bb2516502821a8b03d229028f231bdab9bef4cd3f25d3" exitCode=0 Feb 03 07:13:46 crc kubenswrapper[4708]: I0203 07:13:46.739712 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ggnln" event={"ID":"ebc6ef57-be3f-448d-9acc-45a042d16383","Type":"ContainerDied","Data":"227779308a028715aa5bb2516502821a8b03d229028f231bdab9bef4cd3f25d3"} Feb 03 07:13:47 crc kubenswrapper[4708]: I0203 07:13:47.748374 4708 generic.go:334] "Generic (PLEG): container finished" podID="267fbe93-1af6-4a87-9720-c9d5cae93c91" containerID="ae312b1ce6c2f8921d58231fdba9a2017777bfe2811ff890558ba7e8add16c0f" exitCode=0 Feb 03 07:13:47 crc kubenswrapper[4708]: I0203 07:13:47.748418 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gtjj2" event={"ID":"267fbe93-1af6-4a87-9720-c9d5cae93c91","Type":"ContainerDied","Data":"ae312b1ce6c2f8921d58231fdba9a2017777bfe2811ff890558ba7e8add16c0f"} Feb 03 07:13:47 crc kubenswrapper[4708]: I0203 07:13:47.751369 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7d8rt" event={"ID":"22892077-113b-4859-81cb-9ec0e6fc60ea","Type":"ContainerStarted","Data":"339e08a54a56cc3eb88f2a12ca8eb0c089c0c8384a0ce35a82e234b6ca1d83cd"} Feb 03 07:13:47 crc kubenswrapper[4708]: I0203 07:13:47.770283 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ggnln" event={"ID":"ebc6ef57-be3f-448d-9acc-45a042d16383","Type":"ContainerStarted","Data":"468918fc4d99bbce41002f458c4cf751ac6bb5fb89e75818731d96852bef3f63"} Feb 03 07:13:47 crc kubenswrapper[4708]: I0203 07:13:47.791213 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-7d8rt" 
podStartSLOduration=2.815575345 podStartE2EDuration="56.791196592s" podCreationTimestamp="2026-02-03 07:12:51 +0000 UTC" firstStartedPulling="2026-02-03 07:12:53.180968172 +0000 UTC m=+152.162914979" lastFinishedPulling="2026-02-03 07:13:47.156589419 +0000 UTC m=+206.138536226" observedRunningTime="2026-02-03 07:13:47.787902497 +0000 UTC m=+206.769849304" watchObservedRunningTime="2026-02-03 07:13:47.791196592 +0000 UTC m=+206.773143399" Feb 03 07:13:47 crc kubenswrapper[4708]: I0203 07:13:47.811252 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ggnln" podStartSLOduration=1.81897545 podStartE2EDuration="53.811233691s" podCreationTimestamp="2026-02-03 07:12:54 +0000 UTC" firstStartedPulling="2026-02-03 07:12:55.223779683 +0000 UTC m=+154.205726490" lastFinishedPulling="2026-02-03 07:13:47.216037924 +0000 UTC m=+206.197984731" observedRunningTime="2026-02-03 07:13:47.809451292 +0000 UTC m=+206.791398099" watchObservedRunningTime="2026-02-03 07:13:47.811233691 +0000 UTC m=+206.793180498" Feb 03 07:13:48 crc kubenswrapper[4708]: I0203 07:13:48.776175 4708 generic.go:334] "Generic (PLEG): container finished" podID="d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1" containerID="30f6694ec32e6d65f410ad837055f7214e721994945c535f84f23545d1d1401f" exitCode=0 Feb 03 07:13:48 crc kubenswrapper[4708]: I0203 07:13:48.776269 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pxgjb" event={"ID":"d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1","Type":"ContainerDied","Data":"30f6694ec32e6d65f410ad837055f7214e721994945c535f84f23545d1d1401f"} Feb 03 07:13:48 crc kubenswrapper[4708]: I0203 07:13:48.779386 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gtjj2" event={"ID":"267fbe93-1af6-4a87-9720-c9d5cae93c91","Type":"ContainerStarted","Data":"dfdc0db7024cf00b7f93d79ab3553bb70280614bd106189ac994c86aca548aeb"} Feb 03 07:13:48 crc kubenswrapper[4708]: I0203 07:13:48.781756 4708 generic.go:334] "Generic (PLEG): container finished" podID="e9b0634e-fdcd-47e4-aa53-2c972b7beb30" containerID="07d7bf5d26a9ac4c103edf55e088262aceedcd24ee9e861680d1b5fef9a5238b" exitCode=0 Feb 03 07:13:48 crc kubenswrapper[4708]: I0203 07:13:48.781785 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s52xw" event={"ID":"e9b0634e-fdcd-47e4-aa53-2c972b7beb30","Type":"ContainerDied","Data":"07d7bf5d26a9ac4c103edf55e088262aceedcd24ee9e861680d1b5fef9a5238b"} Feb 03 07:13:48 crc kubenswrapper[4708]: I0203 07:13:48.851510 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-gtjj2" podStartSLOduration=2.82086185 podStartE2EDuration="57.851494807s" podCreationTimestamp="2026-02-03 07:12:51 +0000 UTC" firstStartedPulling="2026-02-03 07:12:53.198648096 +0000 UTC m=+152.180594903" lastFinishedPulling="2026-02-03 07:13:48.229281053 +0000 UTC m=+207.211227860" observedRunningTime="2026-02-03 07:13:48.849342989 +0000 UTC m=+207.831289786" watchObservedRunningTime="2026-02-03 07:13:48.851494807 +0000 UTC m=+207.833441614" Feb 03 07:13:49 crc kubenswrapper[4708]: I0203 07:13:49.789927 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s52xw" event={"ID":"e9b0634e-fdcd-47e4-aa53-2c972b7beb30","Type":"ContainerStarted","Data":"8f155ee56da4d5253443274da28b7427db07fc47b3b8186af66121f4361d7c59"} Feb 03 07:13:49 crc kubenswrapper[4708]: I0203 
07:13:49.792033 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pxgjb" event={"ID":"d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1","Type":"ContainerStarted","Data":"762a1d54785acf389216acdf2cde5c2c4d89e43fe6d2e9ead2bebb1e3ff5874b"} Feb 03 07:13:49 crc kubenswrapper[4708]: I0203 07:13:49.793609 4708 generic.go:334] "Generic (PLEG): container finished" podID="42939d12-477f-4186-9d74-1b62ca36d039" containerID="49dc98a412a3ec33501988a21078410163527f98b6322ae6cb7f25f4be49f74a" exitCode=0 Feb 03 07:13:49 crc kubenswrapper[4708]: I0203 07:13:49.793643 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kclhl" event={"ID":"42939d12-477f-4186-9d74-1b62ca36d039","Type":"ContainerDied","Data":"49dc98a412a3ec33501988a21078410163527f98b6322ae6cb7f25f4be49f74a"} Feb 03 07:13:49 crc kubenswrapper[4708]: I0203 07:13:49.813947 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-s52xw" podStartSLOduration=1.77609054 podStartE2EDuration="54.813927536s" podCreationTimestamp="2026-02-03 07:12:55 +0000 UTC" firstStartedPulling="2026-02-03 07:12:56.245749498 +0000 UTC m=+155.227696305" lastFinishedPulling="2026-02-03 07:13:49.283586494 +0000 UTC m=+208.265533301" observedRunningTime="2026-02-03 07:13:49.81011895 +0000 UTC m=+208.792065757" watchObservedRunningTime="2026-02-03 07:13:49.813927536 +0000 UTC m=+208.795874343" Feb 03 07:13:50 crc kubenswrapper[4708]: I0203 07:13:50.800477 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kclhl" event={"ID":"42939d12-477f-4186-9d74-1b62ca36d039","Type":"ContainerStarted","Data":"c13db8cb2f32d59f9125fc2c4896427ef5e16f72b11ed8995adfc29c4110b29c"} Feb 03 07:13:50 crc kubenswrapper[4708]: I0203 07:13:50.802310 4708 generic.go:334] "Generic (PLEG): container finished" podID="def51730-4952-42ef-9bc1-b04ed753075c" containerID="0d0698a79226b01113f4305adcd3f7c5ecca58a29b98d941a30139f66572d2f3" exitCode=0 Feb 03 07:13:50 crc kubenswrapper[4708]: I0203 07:13:50.802340 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5ldx7" event={"ID":"def51730-4952-42ef-9bc1-b04ed753075c","Type":"ContainerDied","Data":"0d0698a79226b01113f4305adcd3f7c5ecca58a29b98d941a30139f66572d2f3"} Feb 03 07:13:50 crc kubenswrapper[4708]: I0203 07:13:50.826534 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-kclhl" podStartSLOduration=2.635283088 podStartE2EDuration="56.826518051s" podCreationTimestamp="2026-02-03 07:12:54 +0000 UTC" firstStartedPulling="2026-02-03 07:12:56.242008293 +0000 UTC m=+155.223955100" lastFinishedPulling="2026-02-03 07:13:50.433243256 +0000 UTC m=+209.415190063" observedRunningTime="2026-02-03 07:13:50.823425141 +0000 UTC m=+209.805371948" watchObservedRunningTime="2026-02-03 07:13:50.826518051 +0000 UTC m=+209.808464858" Feb 03 07:13:50 crc kubenswrapper[4708]: I0203 07:13:50.827528 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-pxgjb" podStartSLOduration=3.764098947 podStartE2EDuration="59.827520154s" podCreationTimestamp="2026-02-03 07:12:51 +0000 UTC" firstStartedPulling="2026-02-03 07:12:53.194144233 +0000 UTC m=+152.176091050" lastFinishedPulling="2026-02-03 07:13:49.25756544 +0000 UTC m=+208.239512257" observedRunningTime="2026-02-03 07:13:49.853290329 +0000 UTC m=+208.835237146" 
watchObservedRunningTime="2026-02-03 07:13:50.827520154 +0000 UTC m=+209.809466951" Feb 03 07:13:51 crc kubenswrapper[4708]: I0203 07:13:51.806648 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-gtjj2" Feb 03 07:13:51 crc kubenswrapper[4708]: I0203 07:13:51.807031 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gtjj2" Feb 03 07:13:51 crc kubenswrapper[4708]: I0203 07:13:51.811986 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5ldx7" event={"ID":"def51730-4952-42ef-9bc1-b04ed753075c","Type":"ContainerStarted","Data":"0383c6c88c701996590df03e6348caece044d499c0d0bb6b92da76e19e0b7163"} Feb 03 07:13:51 crc kubenswrapper[4708]: I0203 07:13:51.833978 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5ldx7" podStartSLOduration=1.827663848 podStartE2EDuration="59.83395905s" podCreationTimestamp="2026-02-03 07:12:52 +0000 UTC" firstStartedPulling="2026-02-03 07:12:53.185841205 +0000 UTC m=+152.167788012" lastFinishedPulling="2026-02-03 07:13:51.192136407 +0000 UTC m=+210.174083214" observedRunningTime="2026-02-03 07:13:51.831657388 +0000 UTC m=+210.813604195" watchObservedRunningTime="2026-02-03 07:13:51.83395905 +0000 UTC m=+210.815905857" Feb 03 07:13:51 crc kubenswrapper[4708]: I0203 07:13:51.854666 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gtjj2" Feb 03 07:13:52 crc kubenswrapper[4708]: I0203 07:13:52.223138 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-pxgjb" Feb 03 07:13:52 crc kubenswrapper[4708]: I0203 07:13:52.223238 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-pxgjb" Feb 03 07:13:52 crc kubenswrapper[4708]: I0203 07:13:52.266695 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-pxgjb" Feb 03 07:13:52 crc kubenswrapper[4708]: I0203 07:13:52.312397 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-7d8rt" Feb 03 07:13:52 crc kubenswrapper[4708]: I0203 07:13:52.312453 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-7d8rt" Feb 03 07:13:52 crc kubenswrapper[4708]: I0203 07:13:52.347368 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-7d8rt" Feb 03 07:13:52 crc kubenswrapper[4708]: I0203 07:13:52.433322 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5ldx7" Feb 03 07:13:52 crc kubenswrapper[4708]: I0203 07:13:52.433411 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5ldx7" Feb 03 07:13:52 crc kubenswrapper[4708]: I0203 07:13:52.865757 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-7d8rt" Feb 03 07:13:53 crc kubenswrapper[4708]: I0203 07:13:53.483539 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-5ldx7" podUID="def51730-4952-42ef-9bc1-b04ed753075c" 
containerName="registry-server" probeResult="failure" output=< Feb 03 07:13:53 crc kubenswrapper[4708]: timeout: failed to connect service ":50051" within 1s Feb 03 07:13:53 crc kubenswrapper[4708]: > Feb 03 07:13:53 crc kubenswrapper[4708]: I0203 07:13:53.833393 4708 patch_prober.go:28] interesting pod/machine-config-daemon-r94bn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:13:53 crc kubenswrapper[4708]: I0203 07:13:53.833858 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:13:53 crc kubenswrapper[4708]: I0203 07:13:53.833936 4708 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" Feb 03 07:13:53 crc kubenswrapper[4708]: I0203 07:13:53.834881 4708 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4"} pod="openshift-machine-config-operator/machine-config-daemon-r94bn" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 03 07:13:53 crc kubenswrapper[4708]: I0203 07:13:53.835079 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" containerID="cri-o://24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4" gracePeriod=600 Feb 03 07:13:54 crc kubenswrapper[4708]: I0203 07:13:54.402521 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ggnln" Feb 03 07:13:54 crc kubenswrapper[4708]: I0203 07:13:54.402874 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ggnln" Feb 03 07:13:54 crc kubenswrapper[4708]: I0203 07:13:54.457655 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ggnln" Feb 03 07:13:54 crc kubenswrapper[4708]: I0203 07:13:54.830500 4708 generic.go:334] "Generic (PLEG): container finished" podID="67498414-5132-496e-9638-189f5941ace0" containerID="24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4" exitCode=0 Feb 03 07:13:54 crc kubenswrapper[4708]: I0203 07:13:54.831335 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" event={"ID":"67498414-5132-496e-9638-189f5941ace0","Type":"ContainerDied","Data":"24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4"} Feb 03 07:13:54 crc kubenswrapper[4708]: I0203 07:13:54.831372 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" event={"ID":"67498414-5132-496e-9638-189f5941ace0","Type":"ContainerStarted","Data":"b3d08046058bec9282f069dc43715820de60eb6b7be4c972144f0a3216e6f3f3"} Feb 03 07:13:54 crc kubenswrapper[4708]: I0203 07:13:54.906698 4708 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ggnln" Feb 03 07:13:55 crc kubenswrapper[4708]: I0203 07:13:55.001751 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-kclhl" Feb 03 07:13:55 crc kubenswrapper[4708]: I0203 07:13:55.001821 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-kclhl" Feb 03 07:13:55 crc kubenswrapper[4708]: I0203 07:13:55.437286 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-s52xw" Feb 03 07:13:55 crc kubenswrapper[4708]: I0203 07:13:55.438403 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-s52xw" Feb 03 07:13:55 crc kubenswrapper[4708]: I0203 07:13:55.521883 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-s52xw" Feb 03 07:13:55 crc kubenswrapper[4708]: I0203 07:13:55.876778 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-s52xw" Feb 03 07:13:56 crc kubenswrapper[4708]: I0203 07:13:56.057474 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-kclhl" podUID="42939d12-477f-4186-9d74-1b62ca36d039" containerName="registry-server" probeResult="failure" output=< Feb 03 07:13:56 crc kubenswrapper[4708]: timeout: failed to connect service ":50051" within 1s Feb 03 07:13:56 crc kubenswrapper[4708]: > Feb 03 07:13:56 crc kubenswrapper[4708]: I0203 07:13:56.563736 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" podUID="242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6" containerName="oauth-openshift" containerID="cri-o://a913441fe669bd7247f09f337c749ffb62e2471802d62011a6482594d98ae3ac" gracePeriod=15 Feb 03 07:13:56 crc kubenswrapper[4708]: I0203 07:13:56.845017 4708 generic.go:334] "Generic (PLEG): container finished" podID="242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6" containerID="a913441fe669bd7247f09f337c749ffb62e2471802d62011a6482594d98ae3ac" exitCode=0 Feb 03 07:13:56 crc kubenswrapper[4708]: I0203 07:13:56.845913 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" event={"ID":"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6","Type":"ContainerDied","Data":"a913441fe669bd7247f09f337c749ffb62e2471802d62011a6482594d98ae3ac"} Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.155297 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.188186 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-d647fcd4-gk66z"] Feb 03 07:13:57 crc kubenswrapper[4708]: E0203 07:13:57.188446 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6" containerName="oauth-openshift" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.188458 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6" containerName="oauth-openshift" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.188578 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6" containerName="oauth-openshift" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.189059 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.211086 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-d647fcd4-gk66z"] Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.229145 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-service-ca\") pod \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.229203 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-user-template-error\") pod \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.229235 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-serving-cert\") pod \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.229263 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-audit-policies\") pod \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.229304 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-user-idp-0-file-data\") pod \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.229332 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-ocp-branding-template\") pod \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " Feb 03 
07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.229359 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-router-certs\") pod \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.229397 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-audit-dir\") pod \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.229424 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-cliconfig\") pod \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.229447 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-trusted-ca-bundle\") pod \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.229476 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-user-template-provider-selection\") pod \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.229513 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7cdmx\" (UniqueName: \"kubernetes.io/projected/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-kube-api-access-7cdmx\") pod \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.229538 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-session\") pod \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.229577 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-user-template-login\") pod \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\" (UID: \"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6\") " Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.229746 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-system-serving-cert\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 
07:13:57.229772 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-user-template-error\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.229833 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-system-session\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.229857 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-system-router-certs\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.229896 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.229918 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.229939 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/de40641b-acfd-4770-9237-68cc6d06a370-audit-dir\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.229964 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.229977 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6" (UID: "242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6"). 
InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.229984 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.230034 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pmzj\" (UniqueName: \"kubernetes.io/projected/de40641b-acfd-4770-9237-68cc6d06a370-kube-api-access-6pmzj\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.230058 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-system-service-ca\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.230080 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-system-cliconfig\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.230099 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-user-template-login\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.230129 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/de40641b-acfd-4770-9237-68cc6d06a370-audit-policies\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.230162 4708 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.231141 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6" (UID: "242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.231916 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6" (UID: "242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.231975 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6" (UID: "242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.232362 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6" (UID: "242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.235201 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6" (UID: "242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.235479 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6" (UID: "242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.235682 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6" (UID: "242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.235958 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6" (UID: "242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.236213 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-kube-api-access-7cdmx" (OuterVolumeSpecName: "kube-api-access-7cdmx") pod "242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6" (UID: "242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6"). InnerVolumeSpecName "kube-api-access-7cdmx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.236245 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6" (UID: "242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.236714 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6" (UID: "242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.236936 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6" (UID: "242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.238351 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6" (UID: "242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.332030 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/de40641b-acfd-4770-9237-68cc6d06a370-audit-policies\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.332161 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-system-serving-cert\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.332210 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-user-template-error\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.332291 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-system-session\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.332332 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-system-router-certs\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.332407 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.332455 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.332498 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/de40641b-acfd-4770-9237-68cc6d06a370-audit-dir\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: 
I0203 07:13:57.332546 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.332590 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.332647 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pmzj\" (UniqueName: \"kubernetes.io/projected/de40641b-acfd-4770-9237-68cc6d06a370-kube-api-access-6pmzj\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.332703 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-system-service-ca\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.332757 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-system-cliconfig\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.332846 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-user-template-login\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.332961 4708 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.332987 4708 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.333013 4708 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-audit-policies\") on node \"crc\" DevicePath \"\"" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.333038 4708 
reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.333065 4708 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.333092 4708 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.333118 4708 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-audit-dir\") on node \"crc\" DevicePath \"\"" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.333143 4708 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.333167 4708 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.333194 4708 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.333222 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7cdmx\" (UniqueName: \"kubernetes.io/projected/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-kube-api-access-7cdmx\") on node \"crc\" DevicePath \"\"" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.333249 4708 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.333273 4708 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.335690 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/de40641b-acfd-4770-9237-68cc6d06a370-audit-dir\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.336022 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.336841 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.338180 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-system-service-ca\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.338564 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-system-cliconfig\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.338683 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/de40641b-acfd-4770-9237-68cc6d06a370-audit-policies\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.339348 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-user-template-login\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.340078 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-system-session\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.341265 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-system-router-certs\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.341372 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-user-template-error\") pod 
\"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.342033 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-system-serving-cert\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.342352 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.342492 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/de40641b-acfd-4770-9237-68cc6d06a370-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.355370 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pmzj\" (UniqueName: \"kubernetes.io/projected/de40641b-acfd-4770-9237-68cc6d06a370-kube-api-access-6pmzj\") pod \"oauth-openshift-d647fcd4-gk66z\" (UID: \"de40641b-acfd-4770-9237-68cc6d06a370\") " pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.512362 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.854524 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.854546 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-lfvrw" event={"ID":"242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6","Type":"ContainerDied","Data":"d00d58bad66725c312925cbecaa349f376c190402d714470a9a618153e2081cd"} Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.855030 4708 scope.go:117] "RemoveContainer" containerID="a913441fe669bd7247f09f337c749ffb62e2471802d62011a6482594d98ae3ac" Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.928812 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ggnln"] Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.929184 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ggnln" podUID="ebc6ef57-be3f-448d-9acc-45a042d16383" containerName="registry-server" containerID="cri-o://468918fc4d99bbce41002f458c4cf751ac6bb5fb89e75818731d96852bef3f63" gracePeriod=2 Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.935424 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lfvrw"] Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.939754 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lfvrw"] Feb 03 07:13:57 crc kubenswrapper[4708]: I0203 07:13:57.964441 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-d647fcd4-gk66z"] Feb 03 07:13:57 crc kubenswrapper[4708]: W0203 07:13:57.977021 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podde40641b_acfd_4770_9237_68cc6d06a370.slice/crio-f1f131ae1ad340223589313b89af5931651c5e66856163b9161f7a3c78658fed WatchSource:0}: Error finding container f1f131ae1ad340223589313b89af5931651c5e66856163b9161f7a3c78658fed: Status 404 returned error can't find the container with id f1f131ae1ad340223589313b89af5931651c5e66856163b9161f7a3c78658fed Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.105740 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6" path="/var/lib/kubelet/pods/242d8dcd-b5fb-4599-9fe7-e83b7e81ffc6/volumes" Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.298952 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ggnln" Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.350527 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebc6ef57-be3f-448d-9acc-45a042d16383-catalog-content\") pod \"ebc6ef57-be3f-448d-9acc-45a042d16383\" (UID: \"ebc6ef57-be3f-448d-9acc-45a042d16383\") " Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.350641 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5vzh4\" (UniqueName: \"kubernetes.io/projected/ebc6ef57-be3f-448d-9acc-45a042d16383-kube-api-access-5vzh4\") pod \"ebc6ef57-be3f-448d-9acc-45a042d16383\" (UID: \"ebc6ef57-be3f-448d-9acc-45a042d16383\") " Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.350732 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebc6ef57-be3f-448d-9acc-45a042d16383-utilities\") pod \"ebc6ef57-be3f-448d-9acc-45a042d16383\" (UID: \"ebc6ef57-be3f-448d-9acc-45a042d16383\") " Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.351943 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ebc6ef57-be3f-448d-9acc-45a042d16383-utilities" (OuterVolumeSpecName: "utilities") pod "ebc6ef57-be3f-448d-9acc-45a042d16383" (UID: "ebc6ef57-be3f-448d-9acc-45a042d16383"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.352121 4708 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebc6ef57-be3f-448d-9acc-45a042d16383-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.358552 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebc6ef57-be3f-448d-9acc-45a042d16383-kube-api-access-5vzh4" (OuterVolumeSpecName: "kube-api-access-5vzh4") pod "ebc6ef57-be3f-448d-9acc-45a042d16383" (UID: "ebc6ef57-be3f-448d-9acc-45a042d16383"). InnerVolumeSpecName "kube-api-access-5vzh4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.374125 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ebc6ef57-be3f-448d-9acc-45a042d16383-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ebc6ef57-be3f-448d-9acc-45a042d16383" (UID: "ebc6ef57-be3f-448d-9acc-45a042d16383"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.453905 4708 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebc6ef57-be3f-448d-9acc-45a042d16383-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.453972 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5vzh4\" (UniqueName: \"kubernetes.io/projected/ebc6ef57-be3f-448d-9acc-45a042d16383-kube-api-access-5vzh4\") on node \"crc\" DevicePath \"\"" Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.516187 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-s52xw"] Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.861228 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" event={"ID":"de40641b-acfd-4770-9237-68cc6d06a370","Type":"ContainerStarted","Data":"31cb501b1a1bab40e7d13a33075808f6d1b48b8ea22c802bcd1db7ded2381ee0"} Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.861595 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" event={"ID":"de40641b-acfd-4770-9237-68cc6d06a370","Type":"ContainerStarted","Data":"f1f131ae1ad340223589313b89af5931651c5e66856163b9161f7a3c78658fed"} Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.863007 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.865611 4708 generic.go:334] "Generic (PLEG): container finished" podID="ebc6ef57-be3f-448d-9acc-45a042d16383" containerID="468918fc4d99bbce41002f458c4cf751ac6bb5fb89e75818731d96852bef3f63" exitCode=0 Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.865806 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-s52xw" podUID="e9b0634e-fdcd-47e4-aa53-2c972b7beb30" containerName="registry-server" containerID="cri-o://8f155ee56da4d5253443274da28b7427db07fc47b3b8186af66121f4361d7c59" gracePeriod=2 Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.865993 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ggnln" event={"ID":"ebc6ef57-be3f-448d-9acc-45a042d16383","Type":"ContainerDied","Data":"468918fc4d99bbce41002f458c4cf751ac6bb5fb89e75818731d96852bef3f63"} Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.866042 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ggnln" event={"ID":"ebc6ef57-be3f-448d-9acc-45a042d16383","Type":"ContainerDied","Data":"dab0ca3bfc652947cb3d34f85a57fba16339b6ba1ff4e5d6be5c2281178a94e3"} Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.866066 4708 scope.go:117] "RemoveContainer" containerID="468918fc4d99bbce41002f458c4cf751ac6bb5fb89e75818731d96852bef3f63" Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.866379 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ggnln" Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.882724 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" podStartSLOduration=27.882710701 podStartE2EDuration="27.882710701s" podCreationTimestamp="2026-02-03 07:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:13:58.881365501 +0000 UTC m=+217.863312308" watchObservedRunningTime="2026-02-03 07:13:58.882710701 +0000 UTC m=+217.864657508" Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.906683 4708 scope.go:117] "RemoveContainer" containerID="227779308a028715aa5bb2516502821a8b03d229028f231bdab9bef4cd3f25d3" Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.945177 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ggnln"] Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.945239 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ggnln"] Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.958380 4708 scope.go:117] "RemoveContainer" containerID="a42ff6eb35b10bab7cc18ec24c7c8ef0007f07568a0a7e6b0aa64e052aae6e49" Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.992732 4708 scope.go:117] "RemoveContainer" containerID="468918fc4d99bbce41002f458c4cf751ac6bb5fb89e75818731d96852bef3f63" Feb 03 07:13:58 crc kubenswrapper[4708]: E0203 07:13:58.993336 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"468918fc4d99bbce41002f458c4cf751ac6bb5fb89e75818731d96852bef3f63\": container with ID starting with 468918fc4d99bbce41002f458c4cf751ac6bb5fb89e75818731d96852bef3f63 not found: ID does not exist" containerID="468918fc4d99bbce41002f458c4cf751ac6bb5fb89e75818731d96852bef3f63" Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.993379 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"468918fc4d99bbce41002f458c4cf751ac6bb5fb89e75818731d96852bef3f63"} err="failed to get container status \"468918fc4d99bbce41002f458c4cf751ac6bb5fb89e75818731d96852bef3f63\": rpc error: code = NotFound desc = could not find container \"468918fc4d99bbce41002f458c4cf751ac6bb5fb89e75818731d96852bef3f63\": container with ID starting with 468918fc4d99bbce41002f458c4cf751ac6bb5fb89e75818731d96852bef3f63 not found: ID does not exist" Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.993405 4708 scope.go:117] "RemoveContainer" containerID="227779308a028715aa5bb2516502821a8b03d229028f231bdab9bef4cd3f25d3" Feb 03 07:13:58 crc kubenswrapper[4708]: E0203 07:13:58.993829 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"227779308a028715aa5bb2516502821a8b03d229028f231bdab9bef4cd3f25d3\": container with ID starting with 227779308a028715aa5bb2516502821a8b03d229028f231bdab9bef4cd3f25d3 not found: ID does not exist" containerID="227779308a028715aa5bb2516502821a8b03d229028f231bdab9bef4cd3f25d3" Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.993870 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"227779308a028715aa5bb2516502821a8b03d229028f231bdab9bef4cd3f25d3"} err="failed to get container status 
\"227779308a028715aa5bb2516502821a8b03d229028f231bdab9bef4cd3f25d3\": rpc error: code = NotFound desc = could not find container \"227779308a028715aa5bb2516502821a8b03d229028f231bdab9bef4cd3f25d3\": container with ID starting with 227779308a028715aa5bb2516502821a8b03d229028f231bdab9bef4cd3f25d3 not found: ID does not exist" Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.993921 4708 scope.go:117] "RemoveContainer" containerID="a42ff6eb35b10bab7cc18ec24c7c8ef0007f07568a0a7e6b0aa64e052aae6e49" Feb 03 07:13:58 crc kubenswrapper[4708]: E0203 07:13:58.994203 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a42ff6eb35b10bab7cc18ec24c7c8ef0007f07568a0a7e6b0aa64e052aae6e49\": container with ID starting with a42ff6eb35b10bab7cc18ec24c7c8ef0007f07568a0a7e6b0aa64e052aae6e49 not found: ID does not exist" containerID="a42ff6eb35b10bab7cc18ec24c7c8ef0007f07568a0a7e6b0aa64e052aae6e49" Feb 03 07:13:58 crc kubenswrapper[4708]: I0203 07:13:58.994224 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a42ff6eb35b10bab7cc18ec24c7c8ef0007f07568a0a7e6b0aa64e052aae6e49"} err="failed to get container status \"a42ff6eb35b10bab7cc18ec24c7c8ef0007f07568a0a7e6b0aa64e052aae6e49\": rpc error: code = NotFound desc = could not find container \"a42ff6eb35b10bab7cc18ec24c7c8ef0007f07568a0a7e6b0aa64e052aae6e49\": container with ID starting with a42ff6eb35b10bab7cc18ec24c7c8ef0007f07568a0a7e6b0aa64e052aae6e49 not found: ID does not exist" Feb 03 07:13:59 crc kubenswrapper[4708]: I0203 07:13:59.049616 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-d647fcd4-gk66z" Feb 03 07:13:59 crc kubenswrapper[4708]: I0203 07:13:59.877668 4708 generic.go:334] "Generic (PLEG): container finished" podID="e9b0634e-fdcd-47e4-aa53-2c972b7beb30" containerID="8f155ee56da4d5253443274da28b7427db07fc47b3b8186af66121f4361d7c59" exitCode=0 Feb 03 07:13:59 crc kubenswrapper[4708]: I0203 07:13:59.877771 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s52xw" event={"ID":"e9b0634e-fdcd-47e4-aa53-2c972b7beb30","Type":"ContainerDied","Data":"8f155ee56da4d5253443274da28b7427db07fc47b3b8186af66121f4361d7c59"} Feb 03 07:14:00 crc kubenswrapper[4708]: I0203 07:14:00.097448 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-s52xw" Feb 03 07:14:00 crc kubenswrapper[4708]: I0203 07:14:00.100457 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebc6ef57-be3f-448d-9acc-45a042d16383" path="/var/lib/kubelet/pods/ebc6ef57-be3f-448d-9acc-45a042d16383/volumes" Feb 03 07:14:00 crc kubenswrapper[4708]: I0203 07:14:00.176685 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wj5nv\" (UniqueName: \"kubernetes.io/projected/e9b0634e-fdcd-47e4-aa53-2c972b7beb30-kube-api-access-wj5nv\") pod \"e9b0634e-fdcd-47e4-aa53-2c972b7beb30\" (UID: \"e9b0634e-fdcd-47e4-aa53-2c972b7beb30\") " Feb 03 07:14:00 crc kubenswrapper[4708]: I0203 07:14:00.176828 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9b0634e-fdcd-47e4-aa53-2c972b7beb30-catalog-content\") pod \"e9b0634e-fdcd-47e4-aa53-2c972b7beb30\" (UID: \"e9b0634e-fdcd-47e4-aa53-2c972b7beb30\") " Feb 03 07:14:00 crc kubenswrapper[4708]: I0203 07:14:00.176894 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9b0634e-fdcd-47e4-aa53-2c972b7beb30-utilities\") pod \"e9b0634e-fdcd-47e4-aa53-2c972b7beb30\" (UID: \"e9b0634e-fdcd-47e4-aa53-2c972b7beb30\") " Feb 03 07:14:00 crc kubenswrapper[4708]: I0203 07:14:00.177787 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e9b0634e-fdcd-47e4-aa53-2c972b7beb30-utilities" (OuterVolumeSpecName: "utilities") pod "e9b0634e-fdcd-47e4-aa53-2c972b7beb30" (UID: "e9b0634e-fdcd-47e4-aa53-2c972b7beb30"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:14:00 crc kubenswrapper[4708]: I0203 07:14:00.183668 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9b0634e-fdcd-47e4-aa53-2c972b7beb30-kube-api-access-wj5nv" (OuterVolumeSpecName: "kube-api-access-wj5nv") pod "e9b0634e-fdcd-47e4-aa53-2c972b7beb30" (UID: "e9b0634e-fdcd-47e4-aa53-2c972b7beb30"). InnerVolumeSpecName "kube-api-access-wj5nv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:14:00 crc kubenswrapper[4708]: I0203 07:14:00.278135 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wj5nv\" (UniqueName: \"kubernetes.io/projected/e9b0634e-fdcd-47e4-aa53-2c972b7beb30-kube-api-access-wj5nv\") on node \"crc\" DevicePath \"\"" Feb 03 07:14:00 crc kubenswrapper[4708]: I0203 07:14:00.278174 4708 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9b0634e-fdcd-47e4-aa53-2c972b7beb30-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:14:00 crc kubenswrapper[4708]: I0203 07:14:00.548065 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e9b0634e-fdcd-47e4-aa53-2c972b7beb30-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e9b0634e-fdcd-47e4-aa53-2c972b7beb30" (UID: "e9b0634e-fdcd-47e4-aa53-2c972b7beb30"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:14:00 crc kubenswrapper[4708]: I0203 07:14:00.581284 4708 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9b0634e-fdcd-47e4-aa53-2c972b7beb30-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:14:00 crc kubenswrapper[4708]: I0203 07:14:00.884216 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s52xw" event={"ID":"e9b0634e-fdcd-47e4-aa53-2c972b7beb30","Type":"ContainerDied","Data":"d3a120b26a73a73cfa43675b96f1a9e23cb389434c541c96ebd59dfa56338a6f"} Feb 03 07:14:00 crc kubenswrapper[4708]: I0203 07:14:00.884277 4708 scope.go:117] "RemoveContainer" containerID="8f155ee56da4d5253443274da28b7427db07fc47b3b8186af66121f4361d7c59" Feb 03 07:14:00 crc kubenswrapper[4708]: I0203 07:14:00.884238 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s52xw" Feb 03 07:14:00 crc kubenswrapper[4708]: I0203 07:14:00.901333 4708 scope.go:117] "RemoveContainer" containerID="07d7bf5d26a9ac4c103edf55e088262aceedcd24ee9e861680d1b5fef9a5238b" Feb 03 07:14:00 crc kubenswrapper[4708]: I0203 07:14:00.909502 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-s52xw"] Feb 03 07:14:00 crc kubenswrapper[4708]: I0203 07:14:00.925142 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-s52xw"] Feb 03 07:14:00 crc kubenswrapper[4708]: I0203 07:14:00.935899 4708 scope.go:117] "RemoveContainer" containerID="baba7e3ce0d3f39ec9787ee56b0e825cbfa64be06d4afeb50bbe3dcfedb8498c" Feb 03 07:14:01 crc kubenswrapper[4708]: I0203 07:14:01.872385 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-gtjj2" Feb 03 07:14:02 crc kubenswrapper[4708]: I0203 07:14:02.117426 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9b0634e-fdcd-47e4-aa53-2c972b7beb30" path="/var/lib/kubelet/pods/e9b0634e-fdcd-47e4-aa53-2c972b7beb30/volumes" Feb 03 07:14:02 crc kubenswrapper[4708]: I0203 07:14:02.268483 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-pxgjb" Feb 03 07:14:02 crc kubenswrapper[4708]: I0203 07:14:02.477981 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5ldx7" Feb 03 07:14:02 crc kubenswrapper[4708]: I0203 07:14:02.525316 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5ldx7" Feb 03 07:14:04 crc kubenswrapper[4708]: I0203 07:14:04.308989 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pxgjb"] Feb 03 07:14:04 crc kubenswrapper[4708]: I0203 07:14:04.309664 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-pxgjb" podUID="d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1" containerName="registry-server" containerID="cri-o://762a1d54785acf389216acdf2cde5c2c4d89e43fe6d2e9ead2bebb1e3ff5874b" gracePeriod=2 Feb 03 07:14:04 crc kubenswrapper[4708]: I0203 07:14:04.908150 4708 generic.go:334] "Generic (PLEG): container finished" podID="d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1" containerID="762a1d54785acf389216acdf2cde5c2c4d89e43fe6d2e9ead2bebb1e3ff5874b" exitCode=0 Feb 03 07:14:04 crc 
kubenswrapper[4708]: I0203 07:14:04.908241 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pxgjb" event={"ID":"d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1","Type":"ContainerDied","Data":"762a1d54785acf389216acdf2cde5c2c4d89e43fe6d2e9ead2bebb1e3ff5874b"} Feb 03 07:14:05 crc kubenswrapper[4708]: I0203 07:14:05.045615 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-kclhl" Feb 03 07:14:05 crc kubenswrapper[4708]: I0203 07:14:05.083162 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-kclhl" Feb 03 07:14:05 crc kubenswrapper[4708]: I0203 07:14:05.191732 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pxgjb" Feb 03 07:14:05 crc kubenswrapper[4708]: I0203 07:14:05.246619 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1-utilities\") pod \"d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1\" (UID: \"d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1\") " Feb 03 07:14:05 crc kubenswrapper[4708]: I0203 07:14:05.246724 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-669sk\" (UniqueName: \"kubernetes.io/projected/d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1-kube-api-access-669sk\") pod \"d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1\" (UID: \"d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1\") " Feb 03 07:14:05 crc kubenswrapper[4708]: I0203 07:14:05.246759 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1-catalog-content\") pod \"d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1\" (UID: \"d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1\") " Feb 03 07:14:05 crc kubenswrapper[4708]: I0203 07:14:05.248934 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1-utilities" (OuterVolumeSpecName: "utilities") pod "d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1" (UID: "d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:14:05 crc kubenswrapper[4708]: I0203 07:14:05.254858 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1-kube-api-access-669sk" (OuterVolumeSpecName: "kube-api-access-669sk") pod "d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1" (UID: "d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1"). InnerVolumeSpecName "kube-api-access-669sk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:14:05 crc kubenswrapper[4708]: I0203 07:14:05.293322 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1" (UID: "d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:14:05 crc kubenswrapper[4708]: I0203 07:14:05.348253 4708 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:14:05 crc kubenswrapper[4708]: I0203 07:14:05.348288 4708 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:14:05 crc kubenswrapper[4708]: I0203 07:14:05.348301 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-669sk\" (UniqueName: \"kubernetes.io/projected/d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1-kube-api-access-669sk\") on node \"crc\" DevicePath \"\"" Feb 03 07:14:05 crc kubenswrapper[4708]: I0203 07:14:05.919504 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pxgjb" event={"ID":"d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1","Type":"ContainerDied","Data":"80576ba1a409469c602067b39fa85e6cb209ec4380acae81a5ded0717c8748f3"} Feb 03 07:14:05 crc kubenswrapper[4708]: I0203 07:14:05.919926 4708 scope.go:117] "RemoveContainer" containerID="762a1d54785acf389216acdf2cde5c2c4d89e43fe6d2e9ead2bebb1e3ff5874b" Feb 03 07:14:05 crc kubenswrapper[4708]: I0203 07:14:05.919570 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pxgjb" Feb 03 07:14:05 crc kubenswrapper[4708]: I0203 07:14:05.952834 4708 scope.go:117] "RemoveContainer" containerID="30f6694ec32e6d65f410ad837055f7214e721994945c535f84f23545d1d1401f" Feb 03 07:14:05 crc kubenswrapper[4708]: I0203 07:14:05.954956 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pxgjb"] Feb 03 07:14:05 crc kubenswrapper[4708]: I0203 07:14:05.958571 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-pxgjb"] Feb 03 07:14:05 crc kubenswrapper[4708]: I0203 07:14:05.972857 4708 scope.go:117] "RemoveContainer" containerID="0e7318122a46c22658bcec6d5637d24348d22c5c84423394663468c16a6d9a01" Feb 03 07:14:06 crc kubenswrapper[4708]: I0203 07:14:06.102981 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1" path="/var/lib/kubelet/pods/d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1/volumes" Feb 03 07:14:06 crc kubenswrapper[4708]: I0203 07:14:06.708985 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5ldx7"] Feb 03 07:14:06 crc kubenswrapper[4708]: I0203 07:14:06.709238 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5ldx7" podUID="def51730-4952-42ef-9bc1-b04ed753075c" containerName="registry-server" containerID="cri-o://0383c6c88c701996590df03e6348caece044d499c0d0bb6b92da76e19e0b7163" gracePeriod=2 Feb 03 07:14:06 crc kubenswrapper[4708]: I0203 07:14:06.925738 4708 generic.go:334] "Generic (PLEG): container finished" podID="def51730-4952-42ef-9bc1-b04ed753075c" containerID="0383c6c88c701996590df03e6348caece044d499c0d0bb6b92da76e19e0b7163" exitCode=0 Feb 03 07:14:06 crc kubenswrapper[4708]: I0203 07:14:06.925811 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5ldx7" 
event={"ID":"def51730-4952-42ef-9bc1-b04ed753075c","Type":"ContainerDied","Data":"0383c6c88c701996590df03e6348caece044d499c0d0bb6b92da76e19e0b7163"} Feb 03 07:14:07 crc kubenswrapper[4708]: I0203 07:14:07.092275 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5ldx7" Feb 03 07:14:07 crc kubenswrapper[4708]: I0203 07:14:07.173190 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/def51730-4952-42ef-9bc1-b04ed753075c-utilities\") pod \"def51730-4952-42ef-9bc1-b04ed753075c\" (UID: \"def51730-4952-42ef-9bc1-b04ed753075c\") " Feb 03 07:14:07 crc kubenswrapper[4708]: I0203 07:14:07.173279 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/def51730-4952-42ef-9bc1-b04ed753075c-catalog-content\") pod \"def51730-4952-42ef-9bc1-b04ed753075c\" (UID: \"def51730-4952-42ef-9bc1-b04ed753075c\") " Feb 03 07:14:07 crc kubenswrapper[4708]: I0203 07:14:07.173350 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8fh9s\" (UniqueName: \"kubernetes.io/projected/def51730-4952-42ef-9bc1-b04ed753075c-kube-api-access-8fh9s\") pod \"def51730-4952-42ef-9bc1-b04ed753075c\" (UID: \"def51730-4952-42ef-9bc1-b04ed753075c\") " Feb 03 07:14:07 crc kubenswrapper[4708]: I0203 07:14:07.174911 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/def51730-4952-42ef-9bc1-b04ed753075c-utilities" (OuterVolumeSpecName: "utilities") pod "def51730-4952-42ef-9bc1-b04ed753075c" (UID: "def51730-4952-42ef-9bc1-b04ed753075c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:14:07 crc kubenswrapper[4708]: I0203 07:14:07.177606 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/def51730-4952-42ef-9bc1-b04ed753075c-kube-api-access-8fh9s" (OuterVolumeSpecName: "kube-api-access-8fh9s") pod "def51730-4952-42ef-9bc1-b04ed753075c" (UID: "def51730-4952-42ef-9bc1-b04ed753075c"). InnerVolumeSpecName "kube-api-access-8fh9s". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:14:07 crc kubenswrapper[4708]: I0203 07:14:07.221152 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/def51730-4952-42ef-9bc1-b04ed753075c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "def51730-4952-42ef-9bc1-b04ed753075c" (UID: "def51730-4952-42ef-9bc1-b04ed753075c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:14:07 crc kubenswrapper[4708]: I0203 07:14:07.275917 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8fh9s\" (UniqueName: \"kubernetes.io/projected/def51730-4952-42ef-9bc1-b04ed753075c-kube-api-access-8fh9s\") on node \"crc\" DevicePath \"\"" Feb 03 07:14:07 crc kubenswrapper[4708]: I0203 07:14:07.276200 4708 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/def51730-4952-42ef-9bc1-b04ed753075c-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:14:07 crc kubenswrapper[4708]: I0203 07:14:07.276261 4708 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/def51730-4952-42ef-9bc1-b04ed753075c-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:14:07 crc kubenswrapper[4708]: I0203 07:14:07.933949 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5ldx7" event={"ID":"def51730-4952-42ef-9bc1-b04ed753075c","Type":"ContainerDied","Data":"40354e1aa916a2e967d73c174096771fb11d309ba93e878b7db7d12fd1510bc6"} Feb 03 07:14:07 crc kubenswrapper[4708]: I0203 07:14:07.934231 4708 scope.go:117] "RemoveContainer" containerID="0383c6c88c701996590df03e6348caece044d499c0d0bb6b92da76e19e0b7163" Feb 03 07:14:07 crc kubenswrapper[4708]: I0203 07:14:07.934037 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5ldx7" Feb 03 07:14:07 crc kubenswrapper[4708]: I0203 07:14:07.955784 4708 scope.go:117] "RemoveContainer" containerID="0d0698a79226b01113f4305adcd3f7c5ecca58a29b98d941a30139f66572d2f3" Feb 03 07:14:07 crc kubenswrapper[4708]: I0203 07:14:07.962350 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5ldx7"] Feb 03 07:14:07 crc kubenswrapper[4708]: I0203 07:14:07.968338 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5ldx7"] Feb 03 07:14:07 crc kubenswrapper[4708]: I0203 07:14:07.986048 4708 scope.go:117] "RemoveContainer" containerID="b10eadca574db351dbba1f3f8d61802ab44491334b87bb7aea1c732baa29698f" Feb 03 07:14:08 crc kubenswrapper[4708]: I0203 07:14:08.120225 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="def51730-4952-42ef-9bc1-b04ed753075c" path="/var/lib/kubelet/pods/def51730-4952-42ef-9bc1-b04ed753075c/volumes" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.536334 4708 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Feb 03 07:14:20 crc kubenswrapper[4708]: E0203 07:14:20.537369 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebc6ef57-be3f-448d-9acc-45a042d16383" containerName="registry-server" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.537389 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebc6ef57-be3f-448d-9acc-45a042d16383" containerName="registry-server" Feb 03 07:14:20 crc kubenswrapper[4708]: E0203 07:14:20.537403 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1" containerName="registry-server" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.537411 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1" containerName="registry-server" Feb 03 07:14:20 crc kubenswrapper[4708]: E0203 
07:14:20.537426 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="def51730-4952-42ef-9bc1-b04ed753075c" containerName="extract-utilities" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.537435 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="def51730-4952-42ef-9bc1-b04ed753075c" containerName="extract-utilities" Feb 03 07:14:20 crc kubenswrapper[4708]: E0203 07:14:20.537448 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="def51730-4952-42ef-9bc1-b04ed753075c" containerName="extract-content" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.537458 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="def51730-4952-42ef-9bc1-b04ed753075c" containerName="extract-content" Feb 03 07:14:20 crc kubenswrapper[4708]: E0203 07:14:20.537470 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebc6ef57-be3f-448d-9acc-45a042d16383" containerName="extract-content" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.537478 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebc6ef57-be3f-448d-9acc-45a042d16383" containerName="extract-content" Feb 03 07:14:20 crc kubenswrapper[4708]: E0203 07:14:20.537492 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebc6ef57-be3f-448d-9acc-45a042d16383" containerName="extract-utilities" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.537501 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebc6ef57-be3f-448d-9acc-45a042d16383" containerName="extract-utilities" Feb 03 07:14:20 crc kubenswrapper[4708]: E0203 07:14:20.537511 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9b0634e-fdcd-47e4-aa53-2c972b7beb30" containerName="extract-utilities" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.537520 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9b0634e-fdcd-47e4-aa53-2c972b7beb30" containerName="extract-utilities" Feb 03 07:14:20 crc kubenswrapper[4708]: E0203 07:14:20.537532 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9b0634e-fdcd-47e4-aa53-2c972b7beb30" containerName="extract-content" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.537539 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9b0634e-fdcd-47e4-aa53-2c972b7beb30" containerName="extract-content" Feb 03 07:14:20 crc kubenswrapper[4708]: E0203 07:14:20.537549 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1" containerName="extract-content" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.537557 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1" containerName="extract-content" Feb 03 07:14:20 crc kubenswrapper[4708]: E0203 07:14:20.537566 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1" containerName="extract-utilities" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.537574 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1" containerName="extract-utilities" Feb 03 07:14:20 crc kubenswrapper[4708]: E0203 07:14:20.537585 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9b0634e-fdcd-47e4-aa53-2c972b7beb30" containerName="registry-server" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.537592 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9b0634e-fdcd-47e4-aa53-2c972b7beb30" containerName="registry-server" Feb 03 
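
Note: the cpu_manager/state_mem pairs above are housekeeping, not failures, despite the E-level prefix: before admitting the new static pod, the CPU manager drops checkpointed per-container CPUSet assignments belonging to pods the node no longer runs (the four marketplace catalog pods, each with extract-utilities, extract-content, and registry-server containers). A toy model of that cleanup, assuming a simple in-memory map keyed by pod UID and container name (illustrative, not kubelet's actual state store):

    // stale_state.go: drop assignments for pods that are no longer active.
    package main

    import "fmt"

    type key struct{ podUID, containerName string }

    func main() {
        // Checkpointed CPU assignments left over from pods that are now gone.
        assignments := map[key]string{
            {"e9b0634e-fdcd-47e4-aa53-2c972b7beb30", "registry-server"}: "0-3",
            {"def51730-4952-42ef-9bc1-b04ed753075c", "extract-content"}: "0-3",
        }
        activePods := map[string]bool{} // none of the catalog pods remain on the node
        for k := range assignments {    // deleting during range is safe in Go
            if !activePods[k.podUID] {
                fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n", k.podUID, k.containerName)
                delete(assignments, k) // the "Deleted CPUSet assignment" entry
            }
        }
    }
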
Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.537613 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="def51730-4952-42ef-9bc1-b04ed753075c" containerName="registry-server"
Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.537848 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9b0634e-fdcd-47e4-aa53-2c972b7beb30" containerName="registry-server"
Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.537866 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="def51730-4952-42ef-9bc1-b04ed753075c" containerName="registry-server"
Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.537880 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5f0779c-8d4a-4ee4-bd6a-bf86b65859d1" containerName="registry-server"
Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.537897 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebc6ef57-be3f-448d-9acc-45a042d16383" containerName="registry-server"
Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.538286 4708 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.538324 4708 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.538457 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.538871 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af" gracePeriod=15
Feb 03 07:14:20 crc kubenswrapper[4708]: E0203 07:14:20.539272 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.539302 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Feb 03 07:14:20 crc kubenswrapper[4708]: E0203 07:14:20.539315 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.539324 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Feb 03 07:14:20 crc kubenswrapper[4708]: E0203 07:14:20.539335 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.539345 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Feb 03 07:14:20 crc kubenswrapper[4708]: E0203 07:14:20.539356 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.539364 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.539343 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://200b361a7b1d3ef0a5d05fba630cfe0727fbf9fb36199fe812935a6c43952335" gracePeriod=15
Feb 03 07:14:20 crc kubenswrapper[4708]: E0203 07:14:20.539373 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.539382 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Feb 03 07:14:20 crc kubenswrapper[4708]: E0203 07:14:20.539393 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup"
Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.539401 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup"
Feb 03 07:14:20 crc kubenswrapper[4708]: E0203 07:14:20.539410 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.539417 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Feb 03 07:14:20 crc kubenswrapper[4708]: E0203 07:14:20.539431 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.539437 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.539453 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc" gracePeriod=15
Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.539494 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69" gracePeriod=15
Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.539601 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.539612 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.539621 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
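
Note: the "SyncLoop REMOVE"/"SyncLoop ADD" pair with source="file" above is a static-pod rollout: the kube-apiserver manifest on disk was replaced, so the kubelet tears down the old pod (killing each of its containers with gracePeriod=15) and starts the new revision plus its startup monitor. The real kubelet watches the manifest directory; the sketch below only polls it by file name to show the signal (the path is the conventional static-pod directory, an assumption to adjust per node, and a content change to an existing file would surface in the kubelet as REMOVE+ADD of the same pod, which this name-only poll does not capture):

    // manifest_poll.go: report static-pod manifest adds/removes by polling.
    package main

    import (
        "fmt"
        "os"
        "time"
    )

    func main() {
        dir := "/etc/kubernetes/manifests" // assumed static-pod dir; adjust as needed
        seen := map[string]bool{}
        for {
            entries, err := os.ReadDir(dir)
            if err != nil {
                panic(err)
            }
            now := map[string]bool{}
            for _, e := range entries {
                now[e.Name()] = true
                if !seen[e.Name()] {
                    fmt.Println(`SyncLoop ADD source="file":`, e.Name())
                }
            }
            for name := range seen {
                if !now[name] {
                    fmt.Println(`SyncLoop REMOVE source="file":`, name)
                }
            }
            seen = now
            time.Sleep(2 * time.Second)
        }
    }
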
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.539633 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.539642 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.539648 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.539600 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34" gracePeriod=15 Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.539895 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.545941 4708 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.643948 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.644314 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.644368 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.644398 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.644487 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod 
\"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.644665 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.644718 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.644758 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.745975 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.746072 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.746091 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.746115 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.746163 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.746213 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: 
\"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.746221 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.746241 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.746269 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.746274 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.746292 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.746311 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.746317 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.746337 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.746361 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 07:14:20 crc kubenswrapper[4708]: I0203 07:14:20.746388 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 07:14:21 crc kubenswrapper[4708]: I0203 07:14:21.012008 4708 generic.go:334] "Generic (PLEG): container finished" podID="edb06079-66b6-4c35-8f10-8978a5692c2f" containerID="d2a6519e725358f25407ef941b7a8a0f8f292362e9af78a1a531c1cd816057d6" exitCode=0 Feb 03 07:14:21 crc kubenswrapper[4708]: I0203 07:14:21.012297 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"edb06079-66b6-4c35-8f10-8978a5692c2f","Type":"ContainerDied","Data":"d2a6519e725358f25407ef941b7a8a0f8f292362e9af78a1a531c1cd816057d6"} Feb 03 07:14:21 crc kubenswrapper[4708]: I0203 07:14:21.013448 4708 status_manager.go:851] "Failed to get status for pod" podUID="edb06079-66b6-4c35-8f10-8978a5692c2f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.53:6443: connect: connection refused" Feb 03 07:14:21 crc kubenswrapper[4708]: I0203 07:14:21.019046 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Feb 03 07:14:21 crc kubenswrapper[4708]: I0203 07:14:21.022351 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 03 07:14:21 crc kubenswrapper[4708]: I0203 07:14:21.024148 4708 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="200b361a7b1d3ef0a5d05fba630cfe0727fbf9fb36199fe812935a6c43952335" exitCode=0 Feb 03 07:14:21 crc kubenswrapper[4708]: I0203 07:14:21.024385 4708 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc" exitCode=0 Feb 03 07:14:21 crc kubenswrapper[4708]: I0203 07:14:21.024618 4708 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34" exitCode=0 Feb 03 07:14:21 crc kubenswrapper[4708]: I0203 07:14:21.024825 4708 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69" exitCode=2 Feb 03 07:14:21 crc kubenswrapper[4708]: I0203 07:14:21.025102 4708 scope.go:117] "RemoveContainer" containerID="79bf5e00a4b9a4a2dc7365d5e1242512f2145b405c1d9358879d3216781d0802" Feb 03 07:14:22 crc kubenswrapper[4708]: I0203 07:14:22.031040 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 03 07:14:22 crc kubenswrapper[4708]: I0203 07:14:22.097894 4708 status_manager.go:851] "Failed to get status for pod" podUID="edb06079-66b6-4c35-8f10-8978a5692c2f" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.53:6443: connect: connection refused" Feb 03 07:14:22 crc kubenswrapper[4708]: I0203 07:14:22.284863 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 03 07:14:22 crc kubenswrapper[4708]: I0203 07:14:22.285971 4708 status_manager.go:851] "Failed to get status for pod" podUID="edb06079-66b6-4c35-8f10-8978a5692c2f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.53:6443: connect: connection refused" Feb 03 07:14:22 crc kubenswrapper[4708]: I0203 07:14:22.365652 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/edb06079-66b6-4c35-8f10-8978a5692c2f-kube-api-access\") pod \"edb06079-66b6-4c35-8f10-8978a5692c2f\" (UID: \"edb06079-66b6-4c35-8f10-8978a5692c2f\") " Feb 03 07:14:22 crc kubenswrapper[4708]: I0203 07:14:22.365751 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/edb06079-66b6-4c35-8f10-8978a5692c2f-kubelet-dir\") pod \"edb06079-66b6-4c35-8f10-8978a5692c2f\" (UID: \"edb06079-66b6-4c35-8f10-8978a5692c2f\") " Feb 03 07:14:22 crc kubenswrapper[4708]: I0203 07:14:22.365813 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/edb06079-66b6-4c35-8f10-8978a5692c2f-var-lock\") pod \"edb06079-66b6-4c35-8f10-8978a5692c2f\" (UID: \"edb06079-66b6-4c35-8f10-8978a5692c2f\") " Feb 03 07:14:22 crc kubenswrapper[4708]: I0203 07:14:22.365884 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/edb06079-66b6-4c35-8f10-8978a5692c2f-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "edb06079-66b6-4c35-8f10-8978a5692c2f" (UID: "edb06079-66b6-4c35-8f10-8978a5692c2f"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:14:22 crc kubenswrapper[4708]: I0203 07:14:22.365965 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/edb06079-66b6-4c35-8f10-8978a5692c2f-var-lock" (OuterVolumeSpecName: "var-lock") pod "edb06079-66b6-4c35-8f10-8978a5692c2f" (UID: "edb06079-66b6-4c35-8f10-8978a5692c2f"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:14:22 crc kubenswrapper[4708]: I0203 07:14:22.366223 4708 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/edb06079-66b6-4c35-8f10-8978a5692c2f-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 03 07:14:22 crc kubenswrapper[4708]: I0203 07:14:22.366241 4708 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/edb06079-66b6-4c35-8f10-8978a5692c2f-var-lock\") on node \"crc\" DevicePath \"\"" Feb 03 07:14:22 crc kubenswrapper[4708]: I0203 07:14:22.370408 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/edb06079-66b6-4c35-8f10-8978a5692c2f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "edb06079-66b6-4c35-8f10-8978a5692c2f" (UID: "edb06079-66b6-4c35-8f10-8978a5692c2f"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:14:22 crc kubenswrapper[4708]: I0203 07:14:22.467905 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/edb06079-66b6-4c35-8f10-8978a5692c2f-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 03 07:14:22 crc kubenswrapper[4708]: I0203 07:14:22.943590 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 03 07:14:22 crc kubenswrapper[4708]: I0203 07:14:22.945038 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 07:14:22 crc kubenswrapper[4708]: I0203 07:14:22.945849 4708 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.53:6443: connect: connection refused" Feb 03 07:14:22 crc kubenswrapper[4708]: I0203 07:14:22.946399 4708 status_manager.go:851] "Failed to get status for pod" podUID="edb06079-66b6-4c35-8f10-8978a5692c2f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.53:6443: connect: connection refused" Feb 03 07:14:22 crc kubenswrapper[4708]: I0203 07:14:22.973291 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Feb 03 07:14:22 crc kubenswrapper[4708]: I0203 07:14:22.973348 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Feb 03 07:14:22 crc kubenswrapper[4708]: I0203 07:14:22.973374 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:14:22 crc kubenswrapper[4708]: I0203 07:14:22.973404 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Feb 03 07:14:22 crc kubenswrapper[4708]: I0203 07:14:22.973400 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:14:22 crc kubenswrapper[4708]: I0203 07:14:22.973430 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:14:22 crc kubenswrapper[4708]: I0203 07:14:22.973681 4708 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Feb 03 07:14:22 crc kubenswrapper[4708]: I0203 07:14:22.973698 4708 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Feb 03 07:14:22 crc kubenswrapper[4708]: I0203 07:14:22.973712 4708 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Feb 03 07:14:23 crc kubenswrapper[4708]: I0203 07:14:23.040428 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 03 07:14:23 crc kubenswrapper[4708]: I0203 07:14:23.041194 4708 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af" exitCode=0 Feb 03 07:14:23 crc kubenswrapper[4708]: I0203 07:14:23.041266 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 07:14:23 crc kubenswrapper[4708]: I0203 07:14:23.041296 4708 scope.go:117] "RemoveContainer" containerID="200b361a7b1d3ef0a5d05fba630cfe0727fbf9fb36199fe812935a6c43952335" Feb 03 07:14:23 crc kubenswrapper[4708]: I0203 07:14:23.042828 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"edb06079-66b6-4c35-8f10-8978a5692c2f","Type":"ContainerDied","Data":"1d28a51365eb37da4090c9fb3405bd3ca7095f056a99b2078d8701183ce2a861"} Feb 03 07:14:23 crc kubenswrapper[4708]: I0203 07:14:23.042860 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1d28a51365eb37da4090c9fb3405bd3ca7095f056a99b2078d8701183ce2a861" Feb 03 07:14:23 crc kubenswrapper[4708]: I0203 07:14:23.042909 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 03 07:14:23 crc kubenswrapper[4708]: I0203 07:14:23.054455 4708 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.53:6443: connect: connection refused" Feb 03 07:14:23 crc kubenswrapper[4708]: I0203 07:14:23.054910 4708 status_manager.go:851] "Failed to get status for pod" podUID="edb06079-66b6-4c35-8f10-8978a5692c2f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.53:6443: connect: connection refused" Feb 03 07:14:23 crc kubenswrapper[4708]: I0203 07:14:23.057928 4708 status_manager.go:851] "Failed to get status for pod" podUID="edb06079-66b6-4c35-8f10-8978a5692c2f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.53:6443: connect: connection refused" Feb 03 07:14:23 crc kubenswrapper[4708]: I0203 07:14:23.060329 4708 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.53:6443: connect: connection refused" Feb 03 07:14:23 crc kubenswrapper[4708]: I0203 07:14:23.060363 4708 scope.go:117] "RemoveContainer" containerID="ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc" Feb 03 07:14:23 crc kubenswrapper[4708]: I0203 07:14:23.077850 4708 scope.go:117] "RemoveContainer" containerID="fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34" Feb 03 07:14:23 crc kubenswrapper[4708]: I0203 07:14:23.088928 4708 scope.go:117] "RemoveContainer" containerID="d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69" Feb 03 07:14:23 crc kubenswrapper[4708]: I0203 07:14:23.102558 4708 scope.go:117] "RemoveContainer" containerID="5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af" Feb 03 07:14:23 crc kubenswrapper[4708]: I0203 07:14:23.115849 4708 scope.go:117] "RemoveContainer" containerID="e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0" Feb 03 07:14:23 crc kubenswrapper[4708]: I0203 07:14:23.132160 4708 scope.go:117] "RemoveContainer" containerID="200b361a7b1d3ef0a5d05fba630cfe0727fbf9fb36199fe812935a6c43952335" Feb 03 07:14:23 crc kubenswrapper[4708]: E0203 07:14:23.138763 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"200b361a7b1d3ef0a5d05fba630cfe0727fbf9fb36199fe812935a6c43952335\": container with ID starting with 200b361a7b1d3ef0a5d05fba630cfe0727fbf9fb36199fe812935a6c43952335 not found: ID does not exist" containerID="200b361a7b1d3ef0a5d05fba630cfe0727fbf9fb36199fe812935a6c43952335" Feb 03 07:14:23 crc kubenswrapper[4708]: I0203 07:14:23.138837 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"200b361a7b1d3ef0a5d05fba630cfe0727fbf9fb36199fe812935a6c43952335"} err="failed to get container status \"200b361a7b1d3ef0a5d05fba630cfe0727fbf9fb36199fe812935a6c43952335\": rpc error: code = NotFound desc = could not find container 
\"200b361a7b1d3ef0a5d05fba630cfe0727fbf9fb36199fe812935a6c43952335\": container with ID starting with 200b361a7b1d3ef0a5d05fba630cfe0727fbf9fb36199fe812935a6c43952335 not found: ID does not exist" Feb 03 07:14:23 crc kubenswrapper[4708]: I0203 07:14:23.138861 4708 scope.go:117] "RemoveContainer" containerID="ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc" Feb 03 07:14:23 crc kubenswrapper[4708]: E0203 07:14:23.139280 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\": container with ID starting with ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc not found: ID does not exist" containerID="ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc" Feb 03 07:14:23 crc kubenswrapper[4708]: I0203 07:14:23.139327 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc"} err="failed to get container status \"ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\": rpc error: code = NotFound desc = could not find container \"ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc\": container with ID starting with ff436071debd2799bbd409954b530d16237c149325ee1d0a5c316499315f69fc not found: ID does not exist" Feb 03 07:14:23 crc kubenswrapper[4708]: I0203 07:14:23.139358 4708 scope.go:117] "RemoveContainer" containerID="fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34" Feb 03 07:14:23 crc kubenswrapper[4708]: E0203 07:14:23.139859 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\": container with ID starting with fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34 not found: ID does not exist" containerID="fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34" Feb 03 07:14:23 crc kubenswrapper[4708]: I0203 07:14:23.139890 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34"} err="failed to get container status \"fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\": rpc error: code = NotFound desc = could not find container \"fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34\": container with ID starting with fe4dec907fc06bf2538cd24ec349d78b31710f7427397a19a2ddb787631daf34 not found: ID does not exist" Feb 03 07:14:23 crc kubenswrapper[4708]: I0203 07:14:23.139907 4708 scope.go:117] "RemoveContainer" containerID="d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69" Feb 03 07:14:23 crc kubenswrapper[4708]: E0203 07:14:23.140237 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\": container with ID starting with d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69 not found: ID does not exist" containerID="d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69" Feb 03 07:14:23 crc kubenswrapper[4708]: I0203 07:14:23.140297 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69"} 
err="failed to get container status \"d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\": rpc error: code = NotFound desc = could not find container \"d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69\": container with ID starting with d8c4eaf001a580caaf083276824632b827753ab11a3d74bcb0129717f6ab0d69 not found: ID does not exist" Feb 03 07:14:23 crc kubenswrapper[4708]: I0203 07:14:23.140337 4708 scope.go:117] "RemoveContainer" containerID="5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af" Feb 03 07:14:23 crc kubenswrapper[4708]: E0203 07:14:23.140614 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\": container with ID starting with 5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af not found: ID does not exist" containerID="5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af" Feb 03 07:14:23 crc kubenswrapper[4708]: I0203 07:14:23.140640 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af"} err="failed to get container status \"5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\": rpc error: code = NotFound desc = could not find container \"5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af\": container with ID starting with 5c1ea37233b26f1eccd3ac03d980ad533dd6fe6a40706ce8bf28fe51324346af not found: ID does not exist" Feb 03 07:14:23 crc kubenswrapper[4708]: I0203 07:14:23.140653 4708 scope.go:117] "RemoveContainer" containerID="e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0" Feb 03 07:14:23 crc kubenswrapper[4708]: E0203 07:14:23.140912 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\": container with ID starting with e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0 not found: ID does not exist" containerID="e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0" Feb 03 07:14:23 crc kubenswrapper[4708]: I0203 07:14:23.140937 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0"} err="failed to get container status \"e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\": rpc error: code = NotFound desc = could not find container \"e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0\": container with ID starting with e8d9727c3cab4b1b94cf1e31667deec8fb8b49b8deef898f07ae8ab90ee38de0 not found: ID does not exist" Feb 03 07:14:24 crc kubenswrapper[4708]: E0203 07:14:24.055823 4708 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.53:6443: connect: connection refused" Feb 03 07:14:24 crc kubenswrapper[4708]: E0203 07:14:24.056092 4708 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.53:6443: connect: connection refused" Feb 03 07:14:24 crc kubenswrapper[4708]: E0203 07:14:24.056267 4708 controller.go:195] "Failed to update lease" err="Put 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.53:6443: connect: connection refused" Feb 03 07:14:24 crc kubenswrapper[4708]: E0203 07:14:24.056456 4708 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.53:6443: connect: connection refused" Feb 03 07:14:24 crc kubenswrapper[4708]: E0203 07:14:24.057135 4708 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.53:6443: connect: connection refused" Feb 03 07:14:24 crc kubenswrapper[4708]: I0203 07:14:24.057163 4708 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Feb 03 07:14:24 crc kubenswrapper[4708]: E0203 07:14:24.057348 4708 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.53:6443: connect: connection refused" interval="200ms" Feb 03 07:14:24 crc kubenswrapper[4708]: I0203 07:14:24.100720 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Feb 03 07:14:24 crc kubenswrapper[4708]: E0203 07:14:24.261368 4708 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.53:6443: connect: connection refused" interval="400ms" Feb 03 07:14:24 crc kubenswrapper[4708]: E0203 07:14:24.662939 4708 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.53:6443: connect: connection refused" interval="800ms" Feb 03 07:14:25 crc kubenswrapper[4708]: E0203 07:14:25.464011 4708 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.53:6443: connect: connection refused" interval="1.6s" Feb 03 07:14:25 crc kubenswrapper[4708]: E0203 07:14:25.587384 4708 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.53:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 07:14:25 crc kubenswrapper[4708]: I0203 07:14:25.587842 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 07:14:25 crc kubenswrapper[4708]: W0203 07:14:25.609444 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-61adfd9c10413b385009f083485269a43f4ca386a366ff6e6e2e4fed37dea0a6 WatchSource:0}: Error finding container 61adfd9c10413b385009f083485269a43f4ca386a366ff6e6e2e4fed37dea0a6: Status 404 returned error can't find the container with id 61adfd9c10413b385009f083485269a43f4ca386a366ff6e6e2e4fed37dea0a6 Feb 03 07:14:25 crc kubenswrapper[4708]: E0203 07:14:25.612246 4708 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.53:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.1890ab2d88b0af30 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-03 07:14:25.611763504 +0000 UTC m=+244.593710311,LastTimestamp:2026-02-03 07:14:25.611763504 +0000 UTC m=+244.593710311,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 03 07:14:26 crc kubenswrapper[4708]: I0203 07:14:26.060253 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"3fe4e4a66cf37e1b0ea95cf2332802ce10391026dab61bfe659a70c05887a321"} Feb 03 07:14:26 crc kubenswrapper[4708]: I0203 07:14:26.060622 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"61adfd9c10413b385009f083485269a43f4ca386a366ff6e6e2e4fed37dea0a6"} Feb 03 07:14:26 crc kubenswrapper[4708]: E0203 07:14:26.061323 4708 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.53:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 07:14:26 crc kubenswrapper[4708]: I0203 07:14:26.061480 4708 status_manager.go:851] "Failed to get status for pod" podUID="edb06079-66b6-4c35-8f10-8978a5692c2f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.53:6443: connect: connection refused" Feb 03 07:14:27 crc kubenswrapper[4708]: E0203 07:14:27.065299 4708 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.53:6443: connect: connection refused" interval="3.2s" Feb 
03 07:14:30 crc kubenswrapper[4708]: E0203 07:14:30.266453 4708 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.53:6443: connect: connection refused" interval="6.4s" Feb 03 07:14:31 crc kubenswrapper[4708]: E0203 07:14:31.166497 4708 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.53:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" volumeName="registry-storage" Feb 03 07:14:32 crc kubenswrapper[4708]: I0203 07:14:32.094778 4708 status_manager.go:851] "Failed to get status for pod" podUID="edb06079-66b6-4c35-8f10-8978a5692c2f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.53:6443: connect: connection refused" Feb 03 07:14:32 crc kubenswrapper[4708]: E0203 07:14:32.327146 4708 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.53:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.1890ab2d88b0af30 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-03 07:14:25.611763504 +0000 UTC m=+244.593710311,LastTimestamp:2026-02-03 07:14:25.611763504 +0000 UTC m=+244.593710311,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 03 07:14:32 crc kubenswrapper[4708]: I0203 07:14:32.776134 4708 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Liveness probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Feb 03 07:14:32 crc kubenswrapper[4708]: I0203 07:14:32.776186 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Feb 03 07:14:33 crc kubenswrapper[4708]: I0203 07:14:33.097902 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Feb 03 07:14:33 crc kubenswrapper[4708]: I0203 07:14:33.097963 4708 
generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1" exitCode=1 Feb 03 07:14:33 crc kubenswrapper[4708]: I0203 07:14:33.097999 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1"} Feb 03 07:14:33 crc kubenswrapper[4708]: I0203 07:14:33.098502 4708 scope.go:117] "RemoveContainer" containerID="c4dff07a28f61095049e2d8a944b0d836eb02257f4573280cb6f76e56ea3e6e1" Feb 03 07:14:33 crc kubenswrapper[4708]: I0203 07:14:33.099239 4708 status_manager.go:851] "Failed to get status for pod" podUID="edb06079-66b6-4c35-8f10-8978a5692c2f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.53:6443: connect: connection refused" Feb 03 07:14:33 crc kubenswrapper[4708]: I0203 07:14:33.099620 4708 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.53:6443: connect: connection refused" Feb 03 07:14:33 crc kubenswrapper[4708]: I0203 07:14:33.514411 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 03 07:14:34 crc kubenswrapper[4708]: I0203 07:14:34.092842 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 07:14:34 crc kubenswrapper[4708]: I0203 07:14:34.093698 4708 status_manager.go:851] "Failed to get status for pod" podUID="edb06079-66b6-4c35-8f10-8978a5692c2f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.53:6443: connect: connection refused" Feb 03 07:14:34 crc kubenswrapper[4708]: I0203 07:14:34.094369 4708 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.53:6443: connect: connection refused" Feb 03 07:14:34 crc kubenswrapper[4708]: I0203 07:14:34.108517 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Feb 03 07:14:34 crc kubenswrapper[4708]: I0203 07:14:34.108581 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"f670f70de652da4cec14e4b263c8fddb123f1b0c60f438b65f9e49693ff032d1"} Feb 03 07:14:34 crc kubenswrapper[4708]: I0203 07:14:34.109416 4708 status_manager.go:851] "Failed to get status for pod" podUID="edb06079-66b6-4c35-8f10-8978a5692c2f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.53:6443: connect: connection refused" Feb 03 07:14:34 crc kubenswrapper[4708]: I0203 07:14:34.110054 4708 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.53:6443: connect: connection refused" Feb 03 07:14:34 crc kubenswrapper[4708]: I0203 07:14:34.111395 4708 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ee6cd805-5c9a-49ab-a83e-3bd1437838f0" Feb 03 07:14:34 crc kubenswrapper[4708]: I0203 07:14:34.111424 4708 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ee6cd805-5c9a-49ab-a83e-3bd1437838f0" Feb 03 07:14:34 crc kubenswrapper[4708]: E0203 07:14:34.112226 4708 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.53:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 07:14:34 crc kubenswrapper[4708]: I0203 07:14:34.112750 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 07:14:34 crc kubenswrapper[4708]: W0203 07:14:34.131302 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-c4ae7ec9ff4802b34677bef2778ae0daa3f0ca0a765ecceb0faa2eae552f9153 WatchSource:0}: Error finding container c4ae7ec9ff4802b34677bef2778ae0daa3f0ca0a765ecceb0faa2eae552f9153: Status 404 returned error can't find the container with id c4ae7ec9ff4802b34677bef2778ae0daa3f0ca0a765ecceb0faa2eae552f9153 Feb 03 07:14:35 crc kubenswrapper[4708]: I0203 07:14:35.116706 4708 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="78f1acec8b9c868a7a3d7b2ef7e2ad1041be86ec75fee9eeda350624db2b4de1" exitCode=0 Feb 03 07:14:35 crc kubenswrapper[4708]: I0203 07:14:35.116760 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"78f1acec8b9c868a7a3d7b2ef7e2ad1041be86ec75fee9eeda350624db2b4de1"} Feb 03 07:14:35 crc kubenswrapper[4708]: I0203 07:14:35.117152 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"c4ae7ec9ff4802b34677bef2778ae0daa3f0ca0a765ecceb0faa2eae552f9153"} Feb 03 07:14:35 crc kubenswrapper[4708]: I0203 07:14:35.118599 4708 status_manager.go:851] "Failed to get status for pod" podUID="edb06079-66b6-4c35-8f10-8978a5692c2f" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.53:6443: connect: connection refused" Feb 03 07:14:35 crc kubenswrapper[4708]: I0203 07:14:35.119010 4708 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ee6cd805-5c9a-49ab-a83e-3bd1437838f0" Feb 03 07:14:35 crc kubenswrapper[4708]: I0203 07:14:35.119068 4708 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ee6cd805-5c9a-49ab-a83e-3bd1437838f0" Feb 03 07:14:35 crc kubenswrapper[4708]: I0203 07:14:35.119132 4708 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.53:6443: connect: connection refused" Feb 03 07:14:35 crc kubenswrapper[4708]: E0203 07:14:35.119589 4708 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.53:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 07:14:36 crc kubenswrapper[4708]: I0203 07:14:36.125573 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"8b0cd6ba6e0a6b4844649045daaeab0e91c2ea1beac50002fe6ba3e5d40ef19f"} Feb 03 07:14:36 crc kubenswrapper[4708]: I0203 07:14:36.125918 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"dcbb4cc49175977ba752a3a7972b2220cfb558ef19ad3bc3e98fc441859a8d4b"} Feb 03 07:14:36 crc kubenswrapper[4708]: I0203 07:14:36.125962 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"d947c2c1802f9e67f32c0de427266728c5c3dada392d34da85efc56cfda895a4"} Feb 03 07:14:36 crc kubenswrapper[4708]: I0203 07:14:36.125971 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"235e4659f167f8b0755a5f7583ae831a453d74373c2daf186f10d456eb588615"} Feb 03 07:14:37 crc kubenswrapper[4708]: I0203 07:14:37.132212 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"fd0655a5502c94add3feb3512a3f7cf96549431e37006fb3265610efa430fb0e"} Feb 03 07:14:37 crc kubenswrapper[4708]: I0203 07:14:37.132585 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 07:14:37 crc kubenswrapper[4708]: I0203 07:14:37.132664 4708 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ee6cd805-5c9a-49ab-a83e-3bd1437838f0" Feb 03 07:14:37 crc kubenswrapper[4708]: I0203 07:14:37.132683 4708 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ee6cd805-5c9a-49ab-a83e-3bd1437838f0" Feb 03 07:14:39 crc kubenswrapper[4708]: I0203 07:14:39.114060 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 07:14:39 crc kubenswrapper[4708]: I0203 07:14:39.114123 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 07:14:39 crc kubenswrapper[4708]: I0203 07:14:39.119052 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 07:14:40 crc kubenswrapper[4708]: I0203 07:14:40.372052 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 03 07:14:40 crc kubenswrapper[4708]: I0203 07:14:40.377002 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 03 07:14:41 crc kubenswrapper[4708]: I0203 07:14:41.153511 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 03 07:14:42 crc kubenswrapper[4708]: I0203 07:14:42.142041 4708 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 07:14:42 crc kubenswrapper[4708]: I0203 07:14:42.230358 4708 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="3c7180b9-919a-4ed3-8380-34ab1cb54981" Feb 03 07:14:43 crc kubenswrapper[4708]: I0203 07:14:43.165620 4708 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ee6cd805-5c9a-49ab-a83e-3bd1437838f0" Feb 03 07:14:43 
crc kubenswrapper[4708]: I0203 07:14:43.165671 4708 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ee6cd805-5c9a-49ab-a83e-3bd1437838f0" Feb 03 07:14:43 crc kubenswrapper[4708]: I0203 07:14:43.170295 4708 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="3c7180b9-919a-4ed3-8380-34ab1cb54981" Feb 03 07:14:43 crc kubenswrapper[4708]: I0203 07:14:43.519186 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 03 07:14:51 crc kubenswrapper[4708]: I0203 07:14:51.255685 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Feb 03 07:14:51 crc kubenswrapper[4708]: I0203 07:14:51.881212 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Feb 03 07:14:52 crc kubenswrapper[4708]: I0203 07:14:52.319333 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Feb 03 07:14:52 crc kubenswrapper[4708]: I0203 07:14:52.595433 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Feb 03 07:14:53 crc kubenswrapper[4708]: I0203 07:14:53.012274 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Feb 03 07:14:53 crc kubenswrapper[4708]: I0203 07:14:53.317836 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 03 07:14:53 crc kubenswrapper[4708]: I0203 07:14:53.542549 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Feb 03 07:14:53 crc kubenswrapper[4708]: I0203 07:14:53.604663 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Feb 03 07:14:53 crc kubenswrapper[4708]: I0203 07:14:53.783017 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Feb 03 07:14:53 crc kubenswrapper[4708]: I0203 07:14:53.789027 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Feb 03 07:14:53 crc kubenswrapper[4708]: I0203 07:14:53.809371 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Feb 03 07:14:53 crc kubenswrapper[4708]: I0203 07:14:53.866257 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Feb 03 07:14:54 crc kubenswrapper[4708]: I0203 07:14:54.152241 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Feb 03 07:14:54 crc kubenswrapper[4708]: I0203 07:14:54.172182 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Feb 03 07:14:54 crc kubenswrapper[4708]: I0203 07:14:54.331081 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Feb 03 07:14:54 crc kubenswrapper[4708]: I0203 
07:14:54.351042 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Feb 03 07:14:54 crc kubenswrapper[4708]: I0203 07:14:54.389179 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Feb 03 07:14:54 crc kubenswrapper[4708]: I0203 07:14:54.429586 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Feb 03 07:14:54 crc kubenswrapper[4708]: I0203 07:14:54.450060 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Feb 03 07:14:54 crc kubenswrapper[4708]: I0203 07:14:54.606956 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Feb 03 07:14:54 crc kubenswrapper[4708]: I0203 07:14:54.873144 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Feb 03 07:14:54 crc kubenswrapper[4708]: I0203 07:14:54.875264 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Feb 03 07:14:54 crc kubenswrapper[4708]: I0203 07:14:54.916039 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Feb 03 07:14:54 crc kubenswrapper[4708]: I0203 07:14:54.947152 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Feb 03 07:14:54 crc kubenswrapper[4708]: I0203 07:14:54.962823 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Feb 03 07:14:55 crc kubenswrapper[4708]: I0203 07:14:55.020119 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Feb 03 07:14:55 crc kubenswrapper[4708]: I0203 07:14:55.101160 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Feb 03 07:14:55 crc kubenswrapper[4708]: I0203 07:14:55.147720 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 03 07:14:55 crc kubenswrapper[4708]: I0203 07:14:55.233727 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Feb 03 07:14:55 crc kubenswrapper[4708]: I0203 07:14:55.330734 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Feb 03 07:14:55 crc kubenswrapper[4708]: I0203 07:14:55.334159 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Feb 03 07:14:55 crc kubenswrapper[4708]: I0203 07:14:55.374970 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Feb 03 07:14:55 crc kubenswrapper[4708]: I0203 07:14:55.384513 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Feb 03 07:14:55 crc kubenswrapper[4708]: I0203 07:14:55.423039 4708 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-apiserver"/"openshift-service-ca.crt" Feb 03 07:14:55 crc kubenswrapper[4708]: I0203 07:14:55.738970 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Feb 03 07:14:55 crc kubenswrapper[4708]: I0203 07:14:55.778995 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Feb 03 07:14:55 crc kubenswrapper[4708]: I0203 07:14:55.887392 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Feb 03 07:14:55 crc kubenswrapper[4708]: I0203 07:14:55.942452 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Feb 03 07:14:55 crc kubenswrapper[4708]: I0203 07:14:55.991337 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Feb 03 07:14:56 crc kubenswrapper[4708]: I0203 07:14:56.029205 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Feb 03 07:14:56 crc kubenswrapper[4708]: I0203 07:14:56.079556 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Feb 03 07:14:56 crc kubenswrapper[4708]: I0203 07:14:56.105921 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Feb 03 07:14:56 crc kubenswrapper[4708]: I0203 07:14:56.122031 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Feb 03 07:14:56 crc kubenswrapper[4708]: I0203 07:14:56.129181 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Feb 03 07:14:56 crc kubenswrapper[4708]: I0203 07:14:56.271138 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Feb 03 07:14:56 crc kubenswrapper[4708]: I0203 07:14:56.276453 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Feb 03 07:14:56 crc kubenswrapper[4708]: I0203 07:14:56.285844 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Feb 03 07:14:56 crc kubenswrapper[4708]: I0203 07:14:56.330088 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Feb 03 07:14:56 crc kubenswrapper[4708]: I0203 07:14:56.347566 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Feb 03 07:14:56 crc kubenswrapper[4708]: I0203 07:14:56.443378 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Feb 03 07:14:56 crc kubenswrapper[4708]: I0203 07:14:56.499486 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Feb 03 07:14:56 crc kubenswrapper[4708]: I0203 07:14:56.507115 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Feb 03 07:14:56 crc kubenswrapper[4708]: I0203 07:14:56.521557 4708 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Feb 03 07:14:56 crc kubenswrapper[4708]: I0203 07:14:56.553469 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Feb 03 07:14:56 crc kubenswrapper[4708]: I0203 07:14:56.583972 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Feb 03 07:14:56 crc kubenswrapper[4708]: I0203 07:14:56.719396 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Feb 03 07:14:56 crc kubenswrapper[4708]: I0203 07:14:56.745919 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 03 07:14:56 crc kubenswrapper[4708]: I0203 07:14:56.762197 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Feb 03 07:14:56 crc kubenswrapper[4708]: I0203 07:14:56.796248 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Feb 03 07:14:56 crc kubenswrapper[4708]: I0203 07:14:56.956405 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Feb 03 07:14:57 crc kubenswrapper[4708]: I0203 07:14:57.073775 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Feb 03 07:14:57 crc kubenswrapper[4708]: I0203 07:14:57.178748 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Feb 03 07:14:57 crc kubenswrapper[4708]: I0203 07:14:57.203182 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Feb 03 07:14:57 crc kubenswrapper[4708]: I0203 07:14:57.282748 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Feb 03 07:14:57 crc kubenswrapper[4708]: I0203 07:14:57.345023 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Feb 03 07:14:57 crc kubenswrapper[4708]: I0203 07:14:57.369078 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Feb 03 07:14:57 crc kubenswrapper[4708]: I0203 07:14:57.433468 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Feb 03 07:14:57 crc kubenswrapper[4708]: I0203 07:14:57.460068 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Feb 03 07:14:57 crc kubenswrapper[4708]: I0203 07:14:57.502732 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Feb 03 07:14:57 crc kubenswrapper[4708]: I0203 07:14:57.508670 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Feb 03 07:14:57 crc kubenswrapper[4708]: I0203 07:14:57.551048 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Feb 03 07:14:57 crc kubenswrapper[4708]: I0203 07:14:57.571052 4708 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Feb 03 07:14:57 crc kubenswrapper[4708]: I0203 07:14:57.717728 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Feb 03 07:14:57 crc kubenswrapper[4708]: I0203 07:14:57.873185 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Feb 03 07:14:57 crc kubenswrapper[4708]: I0203 07:14:57.935280 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Feb 03 07:14:57 crc kubenswrapper[4708]: I0203 07:14:57.944095 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Feb 03 07:14:57 crc kubenswrapper[4708]: I0203 07:14:57.945109 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Feb 03 07:14:58 crc kubenswrapper[4708]: I0203 07:14:58.031045 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Feb 03 07:14:58 crc kubenswrapper[4708]: I0203 07:14:58.051283 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Feb 03 07:14:58 crc kubenswrapper[4708]: I0203 07:14:58.141700 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Feb 03 07:14:58 crc kubenswrapper[4708]: I0203 07:14:58.172397 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 03 07:14:58 crc kubenswrapper[4708]: I0203 07:14:58.222101 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Feb 03 07:14:58 crc kubenswrapper[4708]: I0203 07:14:58.222641 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Feb 03 07:14:58 crc kubenswrapper[4708]: I0203 07:14:58.264512 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Feb 03 07:14:58 crc kubenswrapper[4708]: I0203 07:14:58.401348 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Feb 03 07:14:58 crc kubenswrapper[4708]: I0203 07:14:58.411289 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Feb 03 07:14:58 crc kubenswrapper[4708]: I0203 07:14:58.534237 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Feb 03 07:14:58 crc kubenswrapper[4708]: I0203 07:14:58.567096 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Feb 03 07:14:58 crc kubenswrapper[4708]: I0203 07:14:58.642570 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Feb 03 07:14:58 crc kubenswrapper[4708]: I0203 07:14:58.646217 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Feb 03 07:14:58 crc kubenswrapper[4708]: I0203 07:14:58.737502 4708 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Feb 03 07:14:58 crc kubenswrapper[4708]: I0203 07:14:58.739419 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Feb 03 07:14:58 crc kubenswrapper[4708]: I0203 07:14:58.791335 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Feb 03 07:14:58 crc kubenswrapper[4708]: I0203 07:14:58.883499 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Feb 03 07:14:58 crc kubenswrapper[4708]: I0203 07:14:58.897288 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Feb 03 07:14:59 crc kubenswrapper[4708]: I0203 07:14:59.005186 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Feb 03 07:14:59 crc kubenswrapper[4708]: I0203 07:14:59.042232 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 03 07:14:59 crc kubenswrapper[4708]: I0203 07:14:59.105860 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Feb 03 07:14:59 crc kubenswrapper[4708]: I0203 07:14:59.253064 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Feb 03 07:14:59 crc kubenswrapper[4708]: I0203 07:14:59.295933 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Feb 03 07:14:59 crc kubenswrapper[4708]: I0203 07:14:59.315548 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Feb 03 07:14:59 crc kubenswrapper[4708]: I0203 07:14:59.386256 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Feb 03 07:14:59 crc kubenswrapper[4708]: I0203 07:14:59.388769 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Feb 03 07:14:59 crc kubenswrapper[4708]: I0203 07:14:59.434762 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Feb 03 07:14:59 crc kubenswrapper[4708]: I0203 07:14:59.500922 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Feb 03 07:14:59 crc kubenswrapper[4708]: I0203 07:14:59.509318 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Feb 03 07:14:59 crc kubenswrapper[4708]: I0203 07:14:59.572597 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 03 07:14:59 crc kubenswrapper[4708]: I0203 07:14:59.584475 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Feb 03 07:14:59 crc kubenswrapper[4708]: I0203 07:14:59.629509 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Feb 03 07:14:59 crc kubenswrapper[4708]: I0203 07:14:59.799959 4708 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Feb 03 07:14:59 crc kubenswrapper[4708]: I0203 07:14:59.802144 4708 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Feb 03 07:14:59 crc kubenswrapper[4708]: I0203 07:14:59.819364 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Feb 03 07:14:59 crc kubenswrapper[4708]: I0203 07:14:59.898289 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Feb 03 07:14:59 crc kubenswrapper[4708]: I0203 07:14:59.904667 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Feb 03 07:14:59 crc kubenswrapper[4708]: I0203 07:14:59.907463 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Feb 03 07:14:59 crc kubenswrapper[4708]: I0203 07:14:59.913154 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Feb 03 07:14:59 crc kubenswrapper[4708]: I0203 07:14:59.919662 4708 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Feb 03 07:15:00 crc kubenswrapper[4708]: I0203 07:15:00.031179 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Feb 03 07:15:00 crc kubenswrapper[4708]: I0203 07:15:00.053928 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Feb 03 07:15:00 crc kubenswrapper[4708]: I0203 07:15:00.126496 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Feb 03 07:15:00 crc kubenswrapper[4708]: I0203 07:15:00.201841 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Feb 03 07:15:00 crc kubenswrapper[4708]: I0203 07:15:00.215575 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Feb 03 07:15:00 crc kubenswrapper[4708]: I0203 07:15:00.226995 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Feb 03 07:15:00 crc kubenswrapper[4708]: I0203 07:15:00.353832 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Feb 03 07:15:00 crc kubenswrapper[4708]: I0203 07:15:00.398609 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Feb 03 07:15:00 crc kubenswrapper[4708]: I0203 07:15:00.449482 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Feb 03 07:15:00 crc kubenswrapper[4708]: I0203 07:15:00.495691 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 03 07:15:00 crc kubenswrapper[4708]: I0203 07:15:00.598672 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Feb 03 07:15:00 crc kubenswrapper[4708]: I0203 07:15:00.670557 
4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Feb 03 07:15:00 crc kubenswrapper[4708]: I0203 07:15:00.805780 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Feb 03 07:15:00 crc kubenswrapper[4708]: I0203 07:15:00.818556 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 03 07:15:00 crc kubenswrapper[4708]: I0203 07:15:00.973664 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.015675 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.038752 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.101477 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.197919 4708 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.227983 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.256285 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.270196 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.446467 4708 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.455597 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.455925 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hs7h6","openshift-kube-apiserver/kube-apiserver-crc","openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj"] Feb 03 07:15:01 crc kubenswrapper[4708]: E0203 07:15:01.456228 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edb06079-66b6-4c35-8f10-8978a5692c2f" containerName="installer" Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.456304 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="edb06079-66b6-4c35-8f10-8978a5692c2f" containerName="installer" Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.456663 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="edb06079-66b6-4c35-8f10-8978a5692c2f" containerName="installer" Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.456775 4708 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ee6cd805-5c9a-49ab-a83e-3bd1437838f0" Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.456870 4708 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
podUID="ee6cd805-5c9a-49ab-a83e-3bd1437838f0" Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.459495 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hs7h6" Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.460369 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kclhl","openshift-marketplace/marketplace-operator-79b997595-t2lvz","openshift-marketplace/community-operators-gtjj2","openshift-marketplace/certified-operators-7d8rt","openshift-marketplace/redhat-marketplace-ww8kc"] Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.464355 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.462356 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj" Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.465042 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ww8kc" podUID="70bd64d0-0ea0-4c56-9e7f-fc150343c834" containerName="registry-server" containerID="cri-o://eab8451c3757e973623fa044f89ba6b1b69604c6008177532b43d4b678f5f241" gracePeriod=30 Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.465756 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-gtjj2" podUID="267fbe93-1af6-4a87-9720-c9d5cae93c91" containerName="registry-server" containerID="cri-o://dfdc0db7024cf00b7f93d79ab3553bb70280614bd106189ac994c86aca548aeb" gracePeriod=30 Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.466207 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-kclhl" podUID="42939d12-477f-4186-9d74-1b62ca36d039" containerName="registry-server" containerID="cri-o://c13db8cb2f32d59f9125fc2c4896427ef5e16f72b11ed8995adfc29c4110b29c" gracePeriod=30 Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.466465 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-7d8rt" podUID="22892077-113b-4859-81cb-9ec0e6fc60ea" containerName="registry-server" containerID="cri-o://339e08a54a56cc3eb88f2a12ca8eb0c089c0c8384a0ce35a82e234b6ca1d83cd" gracePeriod=30 Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.466905 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-t2lvz" podUID="00c9d661-6c2e-48e7-9747-1476d52290a8" containerName="marketplace-operator" containerID="cri-o://35b4a48788c3c3dd5793edefe2c4ba7cd2190bf7cce8ddd35de0b6910fdb9c1b" gracePeriod=30 Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.469509 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.469851 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.490758 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=19.490730646 
podStartE2EDuration="19.490730646s" podCreationTimestamp="2026-02-03 07:14:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:15:01.485990632 +0000 UTC m=+280.467937459" watchObservedRunningTime="2026-02-03 07:15:01.490730646 +0000 UTC m=+280.472677473"
Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.498111 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.525217 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.540056 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.549669 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.567543 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfn4l\" (UniqueName: \"kubernetes.io/projected/760c2ebf-e516-4db6-a500-d2b897cc96de-kube-api-access-kfn4l\") pod \"marketplace-operator-79b997595-hs7h6\" (UID: \"760c2ebf-e516-4db6-a500-d2b897cc96de\") " pod="openshift-marketplace/marketplace-operator-79b997595-hs7h6"
Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.567584 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e78f23f0-a7a1-489d-a40f-05d722cc29ec-secret-volume\") pod \"collect-profiles-29501715-pvnpj\" (UID: \"e78f23f0-a7a1-489d-a40f-05d722cc29ec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj"
Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.567621 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e78f23f0-a7a1-489d-a40f-05d722cc29ec-config-volume\") pod \"collect-profiles-29501715-pvnpj\" (UID: \"e78f23f0-a7a1-489d-a40f-05d722cc29ec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj"
Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.567644 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/760c2ebf-e516-4db6-a500-d2b897cc96de-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hs7h6\" (UID: \"760c2ebf-e516-4db6-a500-d2b897cc96de\") " pod="openshift-marketplace/marketplace-operator-79b997595-hs7h6"
Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.567681 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ct72k\" (UniqueName: \"kubernetes.io/projected/e78f23f0-a7a1-489d-a40f-05d722cc29ec-kube-api-access-ct72k\") pod \"collect-profiles-29501715-pvnpj\" (UID: \"e78f23f0-a7a1-489d-a40f-05d722cc29ec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj"
Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.567700 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/760c2ebf-e516-4db6-a500-d2b897cc96de-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hs7h6\" (UID: \"760c2ebf-e516-4db6-a500-d2b897cc96de\") " pod="openshift-marketplace/marketplace-operator-79b997595-hs7h6"
Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.586147 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.640749 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.668589 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e78f23f0-a7a1-489d-a40f-05d722cc29ec-secret-volume\") pod \"collect-profiles-29501715-pvnpj\" (UID: \"e78f23f0-a7a1-489d-a40f-05d722cc29ec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj"
Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.668643 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e78f23f0-a7a1-489d-a40f-05d722cc29ec-config-volume\") pod \"collect-profiles-29501715-pvnpj\" (UID: \"e78f23f0-a7a1-489d-a40f-05d722cc29ec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj"
Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.668665 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/760c2ebf-e516-4db6-a500-d2b897cc96de-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hs7h6\" (UID: \"760c2ebf-e516-4db6-a500-d2b897cc96de\") " pod="openshift-marketplace/marketplace-operator-79b997595-hs7h6"
Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.668703 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ct72k\" (UniqueName: \"kubernetes.io/projected/e78f23f0-a7a1-489d-a40f-05d722cc29ec-kube-api-access-ct72k\") pod \"collect-profiles-29501715-pvnpj\" (UID: \"e78f23f0-a7a1-489d-a40f-05d722cc29ec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj"
Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.668724 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/760c2ebf-e516-4db6-a500-d2b897cc96de-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hs7h6\" (UID: \"760c2ebf-e516-4db6-a500-d2b897cc96de\") " pod="openshift-marketplace/marketplace-operator-79b997595-hs7h6"
Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.668750 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfn4l\" (UniqueName: \"kubernetes.io/projected/760c2ebf-e516-4db6-a500-d2b897cc96de-kube-api-access-kfn4l\") pod \"marketplace-operator-79b997595-hs7h6\" (UID: \"760c2ebf-e516-4db6-a500-d2b897cc96de\") " pod="openshift-marketplace/marketplace-operator-79b997595-hs7h6"
Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.669857 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e78f23f0-a7a1-489d-a40f-05d722cc29ec-config-volume\") pod \"collect-profiles-29501715-pvnpj\" (UID: \"e78f23f0-a7a1-489d-a40f-05d722cc29ec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj"
Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.670731 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/760c2ebf-e516-4db6-a500-d2b897cc96de-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hs7h6\" (UID: \"760c2ebf-e516-4db6-a500-d2b897cc96de\") " pod="openshift-marketplace/marketplace-operator-79b997595-hs7h6"
Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.688418 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/760c2ebf-e516-4db6-a500-d2b897cc96de-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hs7h6\" (UID: \"760c2ebf-e516-4db6-a500-d2b897cc96de\") " pod="openshift-marketplace/marketplace-operator-79b997595-hs7h6"
Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.693666 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfn4l\" (UniqueName: \"kubernetes.io/projected/760c2ebf-e516-4db6-a500-d2b897cc96de-kube-api-access-kfn4l\") pod \"marketplace-operator-79b997595-hs7h6\" (UID: \"760c2ebf-e516-4db6-a500-d2b897cc96de\") " pod="openshift-marketplace/marketplace-operator-79b997595-hs7h6"
Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.694897 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e78f23f0-a7a1-489d-a40f-05d722cc29ec-secret-volume\") pod \"collect-profiles-29501715-pvnpj\" (UID: \"e78f23f0-a7a1-489d-a40f-05d722cc29ec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj"
Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.696382 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ct72k\" (UniqueName: \"kubernetes.io/projected/e78f23f0-a7a1-489d-a40f-05d722cc29ec-kube-api-access-ct72k\") pod \"collect-profiles-29501715-pvnpj\" (UID: \"e78f23f0-a7a1-489d-a40f-05d722cc29ec\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj"
Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.722407 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.736993 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.800769 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hs7h6"
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hs7h6" Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.805704 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Feb 03 07:15:01 crc kubenswrapper[4708]: E0203 07:15:01.807128 4708 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dfdc0db7024cf00b7f93d79ab3553bb70280614bd106189ac994c86aca548aeb is running failed: container process not found" containerID="dfdc0db7024cf00b7f93d79ab3553bb70280614bd106189ac994c86aca548aeb" cmd=["grpc_health_probe","-addr=:50051"] Feb 03 07:15:01 crc kubenswrapper[4708]: E0203 07:15:01.807474 4708 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dfdc0db7024cf00b7f93d79ab3553bb70280614bd106189ac994c86aca548aeb is running failed: container process not found" containerID="dfdc0db7024cf00b7f93d79ab3553bb70280614bd106189ac994c86aca548aeb" cmd=["grpc_health_probe","-addr=:50051"] Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.807569 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Feb 03 07:15:01 crc kubenswrapper[4708]: E0203 07:15:01.807728 4708 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dfdc0db7024cf00b7f93d79ab3553bb70280614bd106189ac994c86aca548aeb is running failed: container process not found" containerID="dfdc0db7024cf00b7f93d79ab3553bb70280614bd106189ac994c86aca548aeb" cmd=["grpc_health_probe","-addr=:50051"] Feb 03 07:15:01 crc kubenswrapper[4708]: E0203 07:15:01.807782 4708 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dfdc0db7024cf00b7f93d79ab3553bb70280614bd106189ac994c86aca548aeb is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/community-operators-gtjj2" podUID="267fbe93-1af6-4a87-9720-c9d5cae93c91" containerName="registry-server" Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.813241 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj" Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.829999 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.838132 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.909468 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.917483 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7d8rt" Feb 03 07:15:01 crc kubenswrapper[4708]: I0203 07:15:01.943637 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ww8kc" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:01.951610 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gtjj2" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:01.954350 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-t2lvz" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:01.963274 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kclhl" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:01.973875 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.019359 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.048588 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.061084 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.078576 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mxj5g\" (UniqueName: \"kubernetes.io/projected/22892077-113b-4859-81cb-9ec0e6fc60ea-kube-api-access-mxj5g\") pod \"22892077-113b-4859-81cb-9ec0e6fc60ea\" (UID: \"22892077-113b-4859-81cb-9ec0e6fc60ea\") " Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.078646 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tkbmm\" (UniqueName: \"kubernetes.io/projected/42939d12-477f-4186-9d74-1b62ca36d039-kube-api-access-tkbmm\") pod \"42939d12-477f-4186-9d74-1b62ca36d039\" (UID: \"42939d12-477f-4186-9d74-1b62ca36d039\") " Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.078687 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/267fbe93-1af6-4a87-9720-c9d5cae93c91-utilities\") pod \"267fbe93-1af6-4a87-9720-c9d5cae93c91\" (UID: \"267fbe93-1af6-4a87-9720-c9d5cae93c91\") " Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.078712 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dvtst\" (UniqueName: \"kubernetes.io/projected/70bd64d0-0ea0-4c56-9e7f-fc150343c834-kube-api-access-dvtst\") pod \"70bd64d0-0ea0-4c56-9e7f-fc150343c834\" (UID: \"70bd64d0-0ea0-4c56-9e7f-fc150343c834\") " Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.078751 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nv5lx\" (UniqueName: \"kubernetes.io/projected/267fbe93-1af6-4a87-9720-c9d5cae93c91-kube-api-access-nv5lx\") pod \"267fbe93-1af6-4a87-9720-c9d5cae93c91\" (UID: \"267fbe93-1af6-4a87-9720-c9d5cae93c91\") " Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.078777 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7cnsl\" (UniqueName: \"kubernetes.io/projected/00c9d661-6c2e-48e7-9747-1476d52290a8-kube-api-access-7cnsl\") pod \"00c9d661-6c2e-48e7-9747-1476d52290a8\" (UID: \"00c9d661-6c2e-48e7-9747-1476d52290a8\") " Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.078806 4708 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42939d12-477f-4186-9d74-1b62ca36d039-catalog-content\") pod \"42939d12-477f-4186-9d74-1b62ca36d039\" (UID: \"42939d12-477f-4186-9d74-1b62ca36d039\") " Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.078879 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70bd64d0-0ea0-4c56-9e7f-fc150343c834-utilities\") pod \"70bd64d0-0ea0-4c56-9e7f-fc150343c834\" (UID: \"70bd64d0-0ea0-4c56-9e7f-fc150343c834\") " Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.078920 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/267fbe93-1af6-4a87-9720-c9d5cae93c91-catalog-content\") pod \"267fbe93-1af6-4a87-9720-c9d5cae93c91\" (UID: \"267fbe93-1af6-4a87-9720-c9d5cae93c91\") " Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.078971 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/00c9d661-6c2e-48e7-9747-1476d52290a8-marketplace-trusted-ca\") pod \"00c9d661-6c2e-48e7-9747-1476d52290a8\" (UID: \"00c9d661-6c2e-48e7-9747-1476d52290a8\") " Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.078994 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22892077-113b-4859-81cb-9ec0e6fc60ea-catalog-content\") pod \"22892077-113b-4859-81cb-9ec0e6fc60ea\" (UID: \"22892077-113b-4859-81cb-9ec0e6fc60ea\") " Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.079015 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22892077-113b-4859-81cb-9ec0e6fc60ea-utilities\") pod \"22892077-113b-4859-81cb-9ec0e6fc60ea\" (UID: \"22892077-113b-4859-81cb-9ec0e6fc60ea\") " Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.079047 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70bd64d0-0ea0-4c56-9e7f-fc150343c834-catalog-content\") pod \"70bd64d0-0ea0-4c56-9e7f-fc150343c834\" (UID: \"70bd64d0-0ea0-4c56-9e7f-fc150343c834\") " Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.079071 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/00c9d661-6c2e-48e7-9747-1476d52290a8-marketplace-operator-metrics\") pod \"00c9d661-6c2e-48e7-9747-1476d52290a8\" (UID: \"00c9d661-6c2e-48e7-9747-1476d52290a8\") " Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.079103 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42939d12-477f-4186-9d74-1b62ca36d039-utilities\") pod \"42939d12-477f-4186-9d74-1b62ca36d039\" (UID: \"42939d12-477f-4186-9d74-1b62ca36d039\") " Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.080167 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/42939d12-477f-4186-9d74-1b62ca36d039-utilities" (OuterVolumeSpecName: "utilities") pod "42939d12-477f-4186-9d74-1b62ca36d039" (UID: "42939d12-477f-4186-9d74-1b62ca36d039"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.080419 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70bd64d0-0ea0-4c56-9e7f-fc150343c834-utilities" (OuterVolumeSpecName: "utilities") pod "70bd64d0-0ea0-4c56-9e7f-fc150343c834" (UID: "70bd64d0-0ea0-4c56-9e7f-fc150343c834"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.081087 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/267fbe93-1af6-4a87-9720-c9d5cae93c91-utilities" (OuterVolumeSpecName: "utilities") pod "267fbe93-1af6-4a87-9720-c9d5cae93c91" (UID: "267fbe93-1af6-4a87-9720-c9d5cae93c91"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.081640 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00c9d661-6c2e-48e7-9747-1476d52290a8-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "00c9d661-6c2e-48e7-9747-1476d52290a8" (UID: "00c9d661-6c2e-48e7-9747-1476d52290a8"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.081833 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22892077-113b-4859-81cb-9ec0e6fc60ea-utilities" (OuterVolumeSpecName: "utilities") pod "22892077-113b-4859-81cb-9ec0e6fc60ea" (UID: "22892077-113b-4859-81cb-9ec0e6fc60ea"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.082995 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22892077-113b-4859-81cb-9ec0e6fc60ea-kube-api-access-mxj5g" (OuterVolumeSpecName: "kube-api-access-mxj5g") pod "22892077-113b-4859-81cb-9ec0e6fc60ea" (UID: "22892077-113b-4859-81cb-9ec0e6fc60ea"). InnerVolumeSpecName "kube-api-access-mxj5g". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.083576 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42939d12-477f-4186-9d74-1b62ca36d039-kube-api-access-tkbmm" (OuterVolumeSpecName: "kube-api-access-tkbmm") pod "42939d12-477f-4186-9d74-1b62ca36d039" (UID: "42939d12-477f-4186-9d74-1b62ca36d039"). InnerVolumeSpecName "kube-api-access-tkbmm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.084443 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00c9d661-6c2e-48e7-9747-1476d52290a8-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "00c9d661-6c2e-48e7-9747-1476d52290a8" (UID: "00c9d661-6c2e-48e7-9747-1476d52290a8"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.084886 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70bd64d0-0ea0-4c56-9e7f-fc150343c834-kube-api-access-dvtst" (OuterVolumeSpecName: "kube-api-access-dvtst") pod "70bd64d0-0ea0-4c56-9e7f-fc150343c834" (UID: "70bd64d0-0ea0-4c56-9e7f-fc150343c834"). InnerVolumeSpecName "kube-api-access-dvtst". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.086497 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00c9d661-6c2e-48e7-9747-1476d52290a8-kube-api-access-7cnsl" (OuterVolumeSpecName: "kube-api-access-7cnsl") pod "00c9d661-6c2e-48e7-9747-1476d52290a8" (UID: "00c9d661-6c2e-48e7-9747-1476d52290a8"). InnerVolumeSpecName "kube-api-access-7cnsl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.087329 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/267fbe93-1af6-4a87-9720-c9d5cae93c91-kube-api-access-nv5lx" (OuterVolumeSpecName: "kube-api-access-nv5lx") pod "267fbe93-1af6-4a87-9720-c9d5cae93c91" (UID: "267fbe93-1af6-4a87-9720-c9d5cae93c91"). InnerVolumeSpecName "kube-api-access-nv5lx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.108652 4708 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.116315 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70bd64d0-0ea0-4c56-9e7f-fc150343c834-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "70bd64d0-0ea0-4c56-9e7f-fc150343c834" (UID: "70bd64d0-0ea0-4c56-9e7f-fc150343c834"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.150492 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22892077-113b-4859-81cb-9ec0e6fc60ea-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "22892077-113b-4859-81cb-9ec0e6fc60ea" (UID: "22892077-113b-4859-81cb-9ec0e6fc60ea"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.155003 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/267fbe93-1af6-4a87-9720-c9d5cae93c91-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "267fbe93-1af6-4a87-9720-c9d5cae93c91" (UID: "267fbe93-1af6-4a87-9720-c9d5cae93c91"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.180245 4708 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/267fbe93-1af6-4a87-9720-c9d5cae93c91-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.180276 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dvtst\" (UniqueName: \"kubernetes.io/projected/70bd64d0-0ea0-4c56-9e7f-fc150343c834-kube-api-access-dvtst\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.180288 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nv5lx\" (UniqueName: \"kubernetes.io/projected/267fbe93-1af6-4a87-9720-c9d5cae93c91-kube-api-access-nv5lx\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.180300 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7cnsl\" (UniqueName: \"kubernetes.io/projected/00c9d661-6c2e-48e7-9747-1476d52290a8-kube-api-access-7cnsl\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.180311 4708 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70bd64d0-0ea0-4c56-9e7f-fc150343c834-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.180322 4708 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/267fbe93-1af6-4a87-9720-c9d5cae93c91-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.180333 4708 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/00c9d661-6c2e-48e7-9747-1476d52290a8-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.180344 4708 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22892077-113b-4859-81cb-9ec0e6fc60ea-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.180354 4708 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22892077-113b-4859-81cb-9ec0e6fc60ea-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.180367 4708 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70bd64d0-0ea0-4c56-9e7f-fc150343c834-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.180378 4708 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/00c9d661-6c2e-48e7-9747-1476d52290a8-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.180388 4708 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42939d12-477f-4186-9d74-1b62ca36d039-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.180399 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mxj5g\" (UniqueName: 
\"kubernetes.io/projected/22892077-113b-4859-81cb-9ec0e6fc60ea-kube-api-access-mxj5g\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.180410 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tkbmm\" (UniqueName: \"kubernetes.io/projected/42939d12-477f-4186-9d74-1b62ca36d039-kube-api-access-tkbmm\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.232506 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/42939d12-477f-4186-9d74-1b62ca36d039-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "42939d12-477f-4186-9d74-1b62ca36d039" (UID: "42939d12-477f-4186-9d74-1b62ca36d039"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.251708 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.264407 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.269348 4708 generic.go:334] "Generic (PLEG): container finished" podID="267fbe93-1af6-4a87-9720-c9d5cae93c91" containerID="dfdc0db7024cf00b7f93d79ab3553bb70280614bd106189ac994c86aca548aeb" exitCode=0 Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.269411 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gtjj2" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.269420 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gtjj2" event={"ID":"267fbe93-1af6-4a87-9720-c9d5cae93c91","Type":"ContainerDied","Data":"dfdc0db7024cf00b7f93d79ab3553bb70280614bd106189ac994c86aca548aeb"} Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.269451 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gtjj2" event={"ID":"267fbe93-1af6-4a87-9720-c9d5cae93c91","Type":"ContainerDied","Data":"6514529b62994a92590906716277dd9bb50f1f5e61a29b56a7189e6c5906e1de"} Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.269470 4708 scope.go:117] "RemoveContainer" containerID="dfdc0db7024cf00b7f93d79ab3553bb70280614bd106189ac994c86aca548aeb" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.274203 4708 generic.go:334] "Generic (PLEG): container finished" podID="42939d12-477f-4186-9d74-1b62ca36d039" containerID="c13db8cb2f32d59f9125fc2c4896427ef5e16f72b11ed8995adfc29c4110b29c" exitCode=0 Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.274272 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-kclhl" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.274302 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kclhl" event={"ID":"42939d12-477f-4186-9d74-1b62ca36d039","Type":"ContainerDied","Data":"c13db8cb2f32d59f9125fc2c4896427ef5e16f72b11ed8995adfc29c4110b29c"} Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.274377 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kclhl" event={"ID":"42939d12-477f-4186-9d74-1b62ca36d039","Type":"ContainerDied","Data":"b28ceea3b13fff703a6eee3171f861360da60f27e9cc4da71fd68fa1cdf7434f"} Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.278971 4708 generic.go:334] "Generic (PLEG): container finished" podID="22892077-113b-4859-81cb-9ec0e6fc60ea" containerID="339e08a54a56cc3eb88f2a12ca8eb0c089c0c8384a0ce35a82e234b6ca1d83cd" exitCode=0 Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.279027 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7d8rt" event={"ID":"22892077-113b-4859-81cb-9ec0e6fc60ea","Type":"ContainerDied","Data":"339e08a54a56cc3eb88f2a12ca8eb0c089c0c8384a0ce35a82e234b6ca1d83cd"} Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.279050 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7d8rt" event={"ID":"22892077-113b-4859-81cb-9ec0e6fc60ea","Type":"ContainerDied","Data":"4e4094e6680d2a414e6b78744be9ab130396e5ed0a7bf501af71fed5d8b6b448"} Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.279104 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7d8rt" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.281062 4708 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42939d12-477f-4186-9d74-1b62ca36d039-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.281762 4708 generic.go:334] "Generic (PLEG): container finished" podID="70bd64d0-0ea0-4c56-9e7f-fc150343c834" containerID="eab8451c3757e973623fa044f89ba6b1b69604c6008177532b43d4b678f5f241" exitCode=0 Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.281860 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ww8kc" event={"ID":"70bd64d0-0ea0-4c56-9e7f-fc150343c834","Type":"ContainerDied","Data":"eab8451c3757e973623fa044f89ba6b1b69604c6008177532b43d4b678f5f241"} Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.281896 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ww8kc" event={"ID":"70bd64d0-0ea0-4c56-9e7f-fc150343c834","Type":"ContainerDied","Data":"86cf98deef8ce6035410020574858cfbd2e1f1c6287e36e04b5125f2d9a07808"} Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.281978 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ww8kc" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.283923 4708 generic.go:334] "Generic (PLEG): container finished" podID="00c9d661-6c2e-48e7-9747-1476d52290a8" containerID="35b4a48788c3c3dd5793edefe2c4ba7cd2190bf7cce8ddd35de0b6910fdb9c1b" exitCode=0 Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.283956 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-t2lvz" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.284014 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-t2lvz" event={"ID":"00c9d661-6c2e-48e7-9747-1476d52290a8","Type":"ContainerDied","Data":"35b4a48788c3c3dd5793edefe2c4ba7cd2190bf7cce8ddd35de0b6910fdb9c1b"} Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.284056 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-t2lvz" event={"ID":"00c9d661-6c2e-48e7-9747-1476d52290a8","Type":"ContainerDied","Data":"c220d7283f48711217527650d279712c517a49b852d36354d878448ae936d5c4"} Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.289070 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.293558 4708 scope.go:117] "RemoveContainer" containerID="ae312b1ce6c2f8921d58231fdba9a2017777bfe2811ff890558ba7e8add16c0f" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.307153 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.322400 4708 scope.go:117] "RemoveContainer" containerID="3911ede37c860b7be7c1cdc16810d956eaf691e31b020226a8f82d5c4846fdf8" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.324867 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-t2lvz"] Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.328615 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-t2lvz"] Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.332010 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.341049 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gtjj2"] Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.346460 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-gtjj2"] Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.350325 4708 scope.go:117] "RemoveContainer" containerID="dfdc0db7024cf00b7f93d79ab3553bb70280614bd106189ac994c86aca548aeb" Feb 03 07:15:02 crc kubenswrapper[4708]: E0203 07:15:02.350742 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dfdc0db7024cf00b7f93d79ab3553bb70280614bd106189ac994c86aca548aeb\": container with ID starting with dfdc0db7024cf00b7f93d79ab3553bb70280614bd106189ac994c86aca548aeb not found: ID does not exist" containerID="dfdc0db7024cf00b7f93d79ab3553bb70280614bd106189ac994c86aca548aeb" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.350776 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dfdc0db7024cf00b7f93d79ab3553bb70280614bd106189ac994c86aca548aeb"} err="failed to get container status \"dfdc0db7024cf00b7f93d79ab3553bb70280614bd106189ac994c86aca548aeb\": rpc error: code = NotFound desc = could not find container \"dfdc0db7024cf00b7f93d79ab3553bb70280614bd106189ac994c86aca548aeb\": container with ID starting with 
Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.350803 4708 scope.go:117] "RemoveContainer" containerID="ae312b1ce6c2f8921d58231fdba9a2017777bfe2811ff890558ba7e8add16c0f"
Feb 03 07:15:02 crc kubenswrapper[4708]: E0203 07:15:02.351166 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae312b1ce6c2f8921d58231fdba9a2017777bfe2811ff890558ba7e8add16c0f\": container with ID starting with ae312b1ce6c2f8921d58231fdba9a2017777bfe2811ff890558ba7e8add16c0f not found: ID does not exist" containerID="ae312b1ce6c2f8921d58231fdba9a2017777bfe2811ff890558ba7e8add16c0f"
Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.351206 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae312b1ce6c2f8921d58231fdba9a2017777bfe2811ff890558ba7e8add16c0f"} err="failed to get container status \"ae312b1ce6c2f8921d58231fdba9a2017777bfe2811ff890558ba7e8add16c0f\": rpc error: code = NotFound desc = could not find container \"ae312b1ce6c2f8921d58231fdba9a2017777bfe2811ff890558ba7e8add16c0f\": container with ID starting with ae312b1ce6c2f8921d58231fdba9a2017777bfe2811ff890558ba7e8add16c0f not found: ID does not exist"
Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.351234 4708 scope.go:117] "RemoveContainer" containerID="3911ede37c860b7be7c1cdc16810d956eaf691e31b020226a8f82d5c4846fdf8"
Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.351179 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ww8kc"]
Feb 03 07:15:02 crc kubenswrapper[4708]: E0203 07:15:02.351592 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3911ede37c860b7be7c1cdc16810d956eaf691e31b020226a8f82d5c4846fdf8\": container with ID starting with 3911ede37c860b7be7c1cdc16810d956eaf691e31b020226a8f82d5c4846fdf8 not found: ID does not exist" containerID="3911ede37c860b7be7c1cdc16810d956eaf691e31b020226a8f82d5c4846fdf8"
Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.351621 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3911ede37c860b7be7c1cdc16810d956eaf691e31b020226a8f82d5c4846fdf8"} err="failed to get container status \"3911ede37c860b7be7c1cdc16810d956eaf691e31b020226a8f82d5c4846fdf8\": rpc error: code = NotFound desc = could not find container \"3911ede37c860b7be7c1cdc16810d956eaf691e31b020226a8f82d5c4846fdf8\": container with ID starting with 3911ede37c860b7be7c1cdc16810d956eaf691e31b020226a8f82d5c4846fdf8 not found: ID does not exist"
Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.351638 4708 scope.go:117] "RemoveContainer" containerID="c13db8cb2f32d59f9125fc2c4896427ef5e16f72b11ed8995adfc29c4110b29c"
Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.355968 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ww8kc"]
Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.360273 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kclhl"]
Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.364837 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-kclhl"]
Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.368619 4708 scope.go:117] "RemoveContainer" containerID="49dc98a412a3ec33501988a21078410163527f98b6322ae6cb7f25f4be49f74a"
Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.368910 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7d8rt"]
Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.372792 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-7d8rt"]
Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.375323 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.382408 4708 scope.go:117] "RemoveContainer" containerID="8e538a3b7ef066e48e6ed9c236249888f5a60ebffe1f9a8ed9d4f9914628782f"
Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.392522 4708 scope.go:117] "RemoveContainer" containerID="c13db8cb2f32d59f9125fc2c4896427ef5e16f72b11ed8995adfc29c4110b29c"
Feb 03 07:15:02 crc kubenswrapper[4708]: E0203 07:15:02.393035 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c13db8cb2f32d59f9125fc2c4896427ef5e16f72b11ed8995adfc29c4110b29c\": container with ID starting with c13db8cb2f32d59f9125fc2c4896427ef5e16f72b11ed8995adfc29c4110b29c not found: ID does not exist" containerID="c13db8cb2f32d59f9125fc2c4896427ef5e16f72b11ed8995adfc29c4110b29c"
Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.393092 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c13db8cb2f32d59f9125fc2c4896427ef5e16f72b11ed8995adfc29c4110b29c"} err="failed to get container status \"c13db8cb2f32d59f9125fc2c4896427ef5e16f72b11ed8995adfc29c4110b29c\": rpc error: code = NotFound desc = could not find container \"c13db8cb2f32d59f9125fc2c4896427ef5e16f72b11ed8995adfc29c4110b29c\": container with ID starting with c13db8cb2f32d59f9125fc2c4896427ef5e16f72b11ed8995adfc29c4110b29c not found: ID does not exist"
Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.393124 4708 scope.go:117] "RemoveContainer" containerID="49dc98a412a3ec33501988a21078410163527f98b6322ae6cb7f25f4be49f74a"
Feb 03 07:15:02 crc kubenswrapper[4708]: E0203 07:15:02.393456 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49dc98a412a3ec33501988a21078410163527f98b6322ae6cb7f25f4be49f74a\": container with ID starting with 49dc98a412a3ec33501988a21078410163527f98b6322ae6cb7f25f4be49f74a not found: ID does not exist" containerID="49dc98a412a3ec33501988a21078410163527f98b6322ae6cb7f25f4be49f74a"
Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.393480 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49dc98a412a3ec33501988a21078410163527f98b6322ae6cb7f25f4be49f74a"} err="failed to get container status \"49dc98a412a3ec33501988a21078410163527f98b6322ae6cb7f25f4be49f74a\": rpc error: code = NotFound desc = could not find container \"49dc98a412a3ec33501988a21078410163527f98b6322ae6cb7f25f4be49f74a\": container with ID starting with 49dc98a412a3ec33501988a21078410163527f98b6322ae6cb7f25f4be49f74a not found: ID does not exist"
Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.393501 4708 scope.go:117] "RemoveContainer" containerID="8e538a3b7ef066e48e6ed9c236249888f5a60ebffe1f9a8ed9d4f9914628782f"
Feb 03 07:15:02 crc kubenswrapper[4708]: E0203 07:15:02.393745 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e538a3b7ef066e48e6ed9c236249888f5a60ebffe1f9a8ed9d4f9914628782f\": container with ID starting with 8e538a3b7ef066e48e6ed9c236249888f5a60ebffe1f9a8ed9d4f9914628782f not found: ID does not exist" containerID="8e538a3b7ef066e48e6ed9c236249888f5a60ebffe1f9a8ed9d4f9914628782f"
Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.393777 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e538a3b7ef066e48e6ed9c236249888f5a60ebffe1f9a8ed9d4f9914628782f"} err="failed to get container status \"8e538a3b7ef066e48e6ed9c236249888f5a60ebffe1f9a8ed9d4f9914628782f\": rpc error: code = NotFound desc = could not find container \"8e538a3b7ef066e48e6ed9c236249888f5a60ebffe1f9a8ed9d4f9914628782f\": container with ID starting with 8e538a3b7ef066e48e6ed9c236249888f5a60ebffe1f9a8ed9d4f9914628782f not found: ID does not exist"
Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.393825 4708 scope.go:117] "RemoveContainer" containerID="339e08a54a56cc3eb88f2a12ca8eb0c089c0c8384a0ce35a82e234b6ca1d83cd"
Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.400634 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.402191 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.406979 4708 scope.go:117] "RemoveContainer" containerID="cd7076c59aec17da8d46286d6239d88d01c8dafb71c26ae82b03b14b0ebbe6bc"
Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.409194 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.421936 4708 scope.go:117] "RemoveContainer" containerID="3b88c4bef80cfbefafa4f7fbf783eb555d5c5121853cac161ac28d018af06eac"
Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.423993 4708 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.435627 4708 scope.go:117] "RemoveContainer" containerID="339e08a54a56cc3eb88f2a12ca8eb0c089c0c8384a0ce35a82e234b6ca1d83cd"
Feb 03 07:15:02 crc kubenswrapper[4708]: E0203 07:15:02.436040 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"339e08a54a56cc3eb88f2a12ca8eb0c089c0c8384a0ce35a82e234b6ca1d83cd\": container with ID starting with 339e08a54a56cc3eb88f2a12ca8eb0c089c0c8384a0ce35a82e234b6ca1d83cd not found: ID does not exist" containerID="339e08a54a56cc3eb88f2a12ca8eb0c089c0c8384a0ce35a82e234b6ca1d83cd"
Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.436067 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"339e08a54a56cc3eb88f2a12ca8eb0c089c0c8384a0ce35a82e234b6ca1d83cd"} err="failed to get container status \"339e08a54a56cc3eb88f2a12ca8eb0c089c0c8384a0ce35a82e234b6ca1d83cd\": rpc error: code = NotFound desc = could not find container \"339e08a54a56cc3eb88f2a12ca8eb0c089c0c8384a0ce35a82e234b6ca1d83cd\": container with ID starting with 339e08a54a56cc3eb88f2a12ca8eb0c089c0c8384a0ce35a82e234b6ca1d83cd not found: ID does not exist"
Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.436089 4708 scope.go:117] "RemoveContainer" containerID="cd7076c59aec17da8d46286d6239d88d01c8dafb71c26ae82b03b14b0ebbe6bc"
containerID="cd7076c59aec17da8d46286d6239d88d01c8dafb71c26ae82b03b14b0ebbe6bc" Feb 03 07:15:02 crc kubenswrapper[4708]: E0203 07:15:02.436464 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd7076c59aec17da8d46286d6239d88d01c8dafb71c26ae82b03b14b0ebbe6bc\": container with ID starting with cd7076c59aec17da8d46286d6239d88d01c8dafb71c26ae82b03b14b0ebbe6bc not found: ID does not exist" containerID="cd7076c59aec17da8d46286d6239d88d01c8dafb71c26ae82b03b14b0ebbe6bc" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.436505 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd7076c59aec17da8d46286d6239d88d01c8dafb71c26ae82b03b14b0ebbe6bc"} err="failed to get container status \"cd7076c59aec17da8d46286d6239d88d01c8dafb71c26ae82b03b14b0ebbe6bc\": rpc error: code = NotFound desc = could not find container \"cd7076c59aec17da8d46286d6239d88d01c8dafb71c26ae82b03b14b0ebbe6bc\": container with ID starting with cd7076c59aec17da8d46286d6239d88d01c8dafb71c26ae82b03b14b0ebbe6bc not found: ID does not exist" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.436537 4708 scope.go:117] "RemoveContainer" containerID="3b88c4bef80cfbefafa4f7fbf783eb555d5c5121853cac161ac28d018af06eac" Feb 03 07:15:02 crc kubenswrapper[4708]: E0203 07:15:02.436955 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b88c4bef80cfbefafa4f7fbf783eb555d5c5121853cac161ac28d018af06eac\": container with ID starting with 3b88c4bef80cfbefafa4f7fbf783eb555d5c5121853cac161ac28d018af06eac not found: ID does not exist" containerID="3b88c4bef80cfbefafa4f7fbf783eb555d5c5121853cac161ac28d018af06eac" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.436988 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b88c4bef80cfbefafa4f7fbf783eb555d5c5121853cac161ac28d018af06eac"} err="failed to get container status \"3b88c4bef80cfbefafa4f7fbf783eb555d5c5121853cac161ac28d018af06eac\": rpc error: code = NotFound desc = could not find container \"3b88c4bef80cfbefafa4f7fbf783eb555d5c5121853cac161ac28d018af06eac\": container with ID starting with 3b88c4bef80cfbefafa4f7fbf783eb555d5c5121853cac161ac28d018af06eac not found: ID does not exist" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.437009 4708 scope.go:117] "RemoveContainer" containerID="eab8451c3757e973623fa044f89ba6b1b69604c6008177532b43d4b678f5f241" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.449628 4708 scope.go:117] "RemoveContainer" containerID="5130bd60935ad6a145f37c81b74e444add933f38d687416b3ec0b1f20531af85" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.468281 4708 scope.go:117] "RemoveContainer" containerID="10bcb44e4d98fe66ec41f33f1515f695f868787832ae331b590823d084d34855" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.480473 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.482751 4708 scope.go:117] "RemoveContainer" containerID="eab8451c3757e973623fa044f89ba6b1b69604c6008177532b43d4b678f5f241" Feb 03 07:15:02 crc kubenswrapper[4708]: E0203 07:15:02.483331 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eab8451c3757e973623fa044f89ba6b1b69604c6008177532b43d4b678f5f241\": container with 
ID starting with eab8451c3757e973623fa044f89ba6b1b69604c6008177532b43d4b678f5f241 not found: ID does not exist" containerID="eab8451c3757e973623fa044f89ba6b1b69604c6008177532b43d4b678f5f241" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.483373 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eab8451c3757e973623fa044f89ba6b1b69604c6008177532b43d4b678f5f241"} err="failed to get container status \"eab8451c3757e973623fa044f89ba6b1b69604c6008177532b43d4b678f5f241\": rpc error: code = NotFound desc = could not find container \"eab8451c3757e973623fa044f89ba6b1b69604c6008177532b43d4b678f5f241\": container with ID starting with eab8451c3757e973623fa044f89ba6b1b69604c6008177532b43d4b678f5f241 not found: ID does not exist" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.483409 4708 scope.go:117] "RemoveContainer" containerID="5130bd60935ad6a145f37c81b74e444add933f38d687416b3ec0b1f20531af85" Feb 03 07:15:02 crc kubenswrapper[4708]: E0203 07:15:02.483769 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5130bd60935ad6a145f37c81b74e444add933f38d687416b3ec0b1f20531af85\": container with ID starting with 5130bd60935ad6a145f37c81b74e444add933f38d687416b3ec0b1f20531af85 not found: ID does not exist" containerID="5130bd60935ad6a145f37c81b74e444add933f38d687416b3ec0b1f20531af85" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.483794 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5130bd60935ad6a145f37c81b74e444add933f38d687416b3ec0b1f20531af85"} err="failed to get container status \"5130bd60935ad6a145f37c81b74e444add933f38d687416b3ec0b1f20531af85\": rpc error: code = NotFound desc = could not find container \"5130bd60935ad6a145f37c81b74e444add933f38d687416b3ec0b1f20531af85\": container with ID starting with 5130bd60935ad6a145f37c81b74e444add933f38d687416b3ec0b1f20531af85 not found: ID does not exist" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.483839 4708 scope.go:117] "RemoveContainer" containerID="10bcb44e4d98fe66ec41f33f1515f695f868787832ae331b590823d084d34855" Feb 03 07:15:02 crc kubenswrapper[4708]: E0203 07:15:02.484285 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10bcb44e4d98fe66ec41f33f1515f695f868787832ae331b590823d084d34855\": container with ID starting with 10bcb44e4d98fe66ec41f33f1515f695f868787832ae331b590823d084d34855 not found: ID does not exist" containerID="10bcb44e4d98fe66ec41f33f1515f695f868787832ae331b590823d084d34855" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.484304 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10bcb44e4d98fe66ec41f33f1515f695f868787832ae331b590823d084d34855"} err="failed to get container status \"10bcb44e4d98fe66ec41f33f1515f695f868787832ae331b590823d084d34855\": rpc error: code = NotFound desc = could not find container \"10bcb44e4d98fe66ec41f33f1515f695f868787832ae331b590823d084d34855\": container with ID starting with 10bcb44e4d98fe66ec41f33f1515f695f868787832ae331b590823d084d34855 not found: ID does not exist" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.484316 4708 scope.go:117] "RemoveContainer" containerID="35b4a48788c3c3dd5793edefe2c4ba7cd2190bf7cce8ddd35de0b6910fdb9c1b" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.497066 4708 scope.go:117] "RemoveContainer" 
containerID="35b4a48788c3c3dd5793edefe2c4ba7cd2190bf7cce8ddd35de0b6910fdb9c1b" Feb 03 07:15:02 crc kubenswrapper[4708]: E0203 07:15:02.497618 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"35b4a48788c3c3dd5793edefe2c4ba7cd2190bf7cce8ddd35de0b6910fdb9c1b\": container with ID starting with 35b4a48788c3c3dd5793edefe2c4ba7cd2190bf7cce8ddd35de0b6910fdb9c1b not found: ID does not exist" containerID="35b4a48788c3c3dd5793edefe2c4ba7cd2190bf7cce8ddd35de0b6910fdb9c1b" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.497663 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35b4a48788c3c3dd5793edefe2c4ba7cd2190bf7cce8ddd35de0b6910fdb9c1b"} err="failed to get container status \"35b4a48788c3c3dd5793edefe2c4ba7cd2190bf7cce8ddd35de0b6910fdb9c1b\": rpc error: code = NotFound desc = could not find container \"35b4a48788c3c3dd5793edefe2c4ba7cd2190bf7cce8ddd35de0b6910fdb9c1b\": container with ID starting with 35b4a48788c3c3dd5793edefe2c4ba7cd2190bf7cce8ddd35de0b6910fdb9c1b not found: ID does not exist" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.510105 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.554499 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.615784 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.640903 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.644089 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.895940 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.943347 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Feb 03 07:15:02 crc kubenswrapper[4708]: I0203 07:15:02.958379 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Feb 03 07:15:03 crc kubenswrapper[4708]: I0203 07:15:03.021259 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Feb 03 07:15:03 crc kubenswrapper[4708]: I0203 07:15:03.103651 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Feb 03 07:15:03 crc kubenswrapper[4708]: I0203 07:15:03.135991 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Feb 03 07:15:03 crc kubenswrapper[4708]: I0203 07:15:03.158016 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 03 07:15:03 crc kubenswrapper[4708]: I0203 07:15:03.170015 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Feb 03 07:15:03 crc 
Feb 03 07:15:03 crc kubenswrapper[4708]: I0203 07:15:03.245990 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Feb 03 07:15:03 crc kubenswrapper[4708]: I0203 07:15:03.275848 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Feb 03 07:15:03 crc kubenswrapper[4708]: I0203 07:15:03.374459 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config"
Feb 03 07:15:03 crc kubenswrapper[4708]: I0203 07:15:03.435241 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Feb 03 07:15:03 crc kubenswrapper[4708]: I0203 07:15:03.469872 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Feb 03 07:15:03 crc kubenswrapper[4708]: I0203 07:15:03.499149 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Feb 03 07:15:03 crc kubenswrapper[4708]: I0203 07:15:03.502764 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Feb 03 07:15:03 crc kubenswrapper[4708]: I0203 07:15:03.547209 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Feb 03 07:15:03 crc kubenswrapper[4708]: I0203 07:15:03.577097 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Feb 03 07:15:03 crc kubenswrapper[4708]: I0203 07:15:03.577098 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Feb 03 07:15:03 crc kubenswrapper[4708]: I0203 07:15:03.589121 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Feb 03 07:15:03 crc kubenswrapper[4708]: I0203 07:15:03.726971 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Feb 03 07:15:03 crc kubenswrapper[4708]: I0203 07:15:03.780773 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Feb 03 07:15:04 crc kubenswrapper[4708]: I0203 07:15:04.071526 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Feb 03 07:15:04 crc kubenswrapper[4708]: I0203 07:15:04.101555 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00c9d661-6c2e-48e7-9747-1476d52290a8" path="/var/lib/kubelet/pods/00c9d661-6c2e-48e7-9747-1476d52290a8/volumes"
Feb 03 07:15:04 crc kubenswrapper[4708]: I0203 07:15:04.102340 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22892077-113b-4859-81cb-9ec0e6fc60ea" path="/var/lib/kubelet/pods/22892077-113b-4859-81cb-9ec0e6fc60ea/volumes"
Feb 03 07:15:04 crc kubenswrapper[4708]: I0203 07:15:04.103097 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="267fbe93-1af6-4a87-9720-c9d5cae93c91" path="/var/lib/kubelet/pods/267fbe93-1af6-4a87-9720-c9d5cae93c91/volumes"
Feb 03 07:15:04 crc kubenswrapper[4708]: I0203 07:15:04.109535 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="42939d12-477f-4186-9d74-1b62ca36d039" path="/var/lib/kubelet/pods/42939d12-477f-4186-9d74-1b62ca36d039/volumes"
Feb 03 07:15:04 crc kubenswrapper[4708]: I0203 07:15:04.112103 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70bd64d0-0ea0-4c56-9e7f-fc150343c834" path="/var/lib/kubelet/pods/70bd64d0-0ea0-4c56-9e7f-fc150343c834/volumes"
Feb 03 07:15:04 crc kubenswrapper[4708]: I0203 07:15:04.299345 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt"
Feb 03 07:15:04 crc kubenswrapper[4708]: I0203 07:15:04.305829 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Feb 03 07:15:04 crc kubenswrapper[4708]: I0203 07:15:04.370447 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt"
Feb 03 07:15:04 crc kubenswrapper[4708]: I0203 07:15:04.427114 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hs7h6"]
Feb 03 07:15:04 crc kubenswrapper[4708]: I0203 07:15:04.429740 4708 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Feb 03 07:15:04 crc kubenswrapper[4708]: I0203 07:15:04.430286 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://3fe4e4a66cf37e1b0ea95cf2332802ce10391026dab61bfe659a70c05887a321" gracePeriod=5
Feb 03 07:15:04 crc kubenswrapper[4708]: I0203 07:15:04.437729 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj"]
Feb 03 07:15:04 crc kubenswrapper[4708]: I0203 07:15:04.481615 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Feb 03 07:15:04 crc kubenswrapper[4708]: I0203 07:15:04.512755 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates"
Feb 03 07:15:04 crc kubenswrapper[4708]: I0203 07:15:04.576578 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Feb 03 07:15:04 crc kubenswrapper[4708]: I0203 07:15:04.688356 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Feb 03 07:15:04 crc kubenswrapper[4708]: I0203 07:15:04.705186 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Feb 03 07:15:04 crc kubenswrapper[4708]: I0203 07:15:04.756302 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Feb 03 07:15:04 crc kubenswrapper[4708]: I0203 07:15:04.779617 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Feb 03 07:15:04 crc kubenswrapper[4708]: I0203 07:15:04.807760 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
Feb 03 07:15:04 crc kubenswrapper[4708]: I0203 07:15:04.925407 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
object-"openshift-authentication"/"v4-0-config-system-session" Feb 03 07:15:04 crc kubenswrapper[4708]: I0203 07:15:04.937569 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Feb 03 07:15:04 crc kubenswrapper[4708]: E0203 07:15:04.939806 4708 log.go:32] "RunPodSandbox from runtime service failed" err=< Feb 03 07:15:04 crc kubenswrapper[4708]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-hs7h6_openshift-marketplace_760c2ebf-e516-4db6-a500-d2b897cc96de_0(dd754fde0c6e87e12bd709e886ac54a45def2d81dd2fe3a35eeadf2690d8540f): error adding pod openshift-marketplace_marketplace-operator-79b997595-hs7h6 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"dd754fde0c6e87e12bd709e886ac54a45def2d81dd2fe3a35eeadf2690d8540f" Netns:"/var/run/netns/9f3a3360-436f-419d-a667-36feb114d976" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-hs7h6;K8S_POD_INFRA_CONTAINER_ID=dd754fde0c6e87e12bd709e886ac54a45def2d81dd2fe3a35eeadf2690d8540f;K8S_POD_UID=760c2ebf-e516-4db6-a500-d2b897cc96de" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-hs7h6] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-hs7h6/760c2ebf-e516-4db6-a500-d2b897cc96de]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod marketplace-operator-79b997595-hs7h6 in out of cluster comm: pod "marketplace-operator-79b997595-hs7h6" not found Feb 03 07:15:04 crc kubenswrapper[4708]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Feb 03 07:15:04 crc kubenswrapper[4708]: > Feb 03 07:15:04 crc kubenswrapper[4708]: E0203 07:15:04.943023 4708 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Feb 03 07:15:04 crc kubenswrapper[4708]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-hs7h6_openshift-marketplace_760c2ebf-e516-4db6-a500-d2b897cc96de_0(dd754fde0c6e87e12bd709e886ac54a45def2d81dd2fe3a35eeadf2690d8540f): error adding pod openshift-marketplace_marketplace-operator-79b997595-hs7h6 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"dd754fde0c6e87e12bd709e886ac54a45def2d81dd2fe3a35eeadf2690d8540f" Netns:"/var/run/netns/9f3a3360-436f-419d-a667-36feb114d976" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-hs7h6;K8S_POD_INFRA_CONTAINER_ID=dd754fde0c6e87e12bd709e886ac54a45def2d81dd2fe3a35eeadf2690d8540f;K8S_POD_UID=760c2ebf-e516-4db6-a500-d2b897cc96de" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-hs7h6] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-hs7h6/760c2ebf-e516-4db6-a500-d2b897cc96de]: error setting the networks status, pod was already deleted: 
SetPodNetworkStatusAnnotation: failed to query the pod marketplace-operator-79b997595-hs7h6 in out of cluster comm: pod "marketplace-operator-79b997595-hs7h6" not found Feb 03 07:15:04 crc kubenswrapper[4708]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Feb 03 07:15:04 crc kubenswrapper[4708]: > pod="openshift-marketplace/marketplace-operator-79b997595-hs7h6" Feb 03 07:15:04 crc kubenswrapper[4708]: E0203 07:15:04.943055 4708 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err=< Feb 03 07:15:04 crc kubenswrapper[4708]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-hs7h6_openshift-marketplace_760c2ebf-e516-4db6-a500-d2b897cc96de_0(dd754fde0c6e87e12bd709e886ac54a45def2d81dd2fe3a35eeadf2690d8540f): error adding pod openshift-marketplace_marketplace-operator-79b997595-hs7h6 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"dd754fde0c6e87e12bd709e886ac54a45def2d81dd2fe3a35eeadf2690d8540f" Netns:"/var/run/netns/9f3a3360-436f-419d-a667-36feb114d976" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-hs7h6;K8S_POD_INFRA_CONTAINER_ID=dd754fde0c6e87e12bd709e886ac54a45def2d81dd2fe3a35eeadf2690d8540f;K8S_POD_UID=760c2ebf-e516-4db6-a500-d2b897cc96de" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-hs7h6] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-hs7h6/760c2ebf-e516-4db6-a500-d2b897cc96de]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod marketplace-operator-79b997595-hs7h6 in out of cluster comm: pod "marketplace-operator-79b997595-hs7h6" not found Feb 03 07:15:04 crc kubenswrapper[4708]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Feb 03 07:15:04 crc kubenswrapper[4708]: > pod="openshift-marketplace/marketplace-operator-79b997595-hs7h6" Feb 03 07:15:04 crc kubenswrapper[4708]: E0203 07:15:04.943127 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"marketplace-operator-79b997595-hs7h6_openshift-marketplace(760c2ebf-e516-4db6-a500-d2b897cc96de)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"marketplace-operator-79b997595-hs7h6_openshift-marketplace(760c2ebf-e516-4db6-a500-d2b897cc96de)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-hs7h6_openshift-marketplace_760c2ebf-e516-4db6-a500-d2b897cc96de_0(dd754fde0c6e87e12bd709e886ac54a45def2d81dd2fe3a35eeadf2690d8540f): error adding pod openshift-marketplace_marketplace-operator-79b997595-hs7h6 to CNI network \\\"multus-cni-network\\\": plugin type=\\\"multus-shim\\\" 
name=\\\"multus-cni-network\\\" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:\\\"dd754fde0c6e87e12bd709e886ac54a45def2d81dd2fe3a35eeadf2690d8540f\\\" Netns:\\\"/var/run/netns/9f3a3360-436f-419d-a667-36feb114d976\\\" IfName:\\\"eth0\\\" Args:\\\"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-hs7h6;K8S_POD_INFRA_CONTAINER_ID=dd754fde0c6e87e12bd709e886ac54a45def2d81dd2fe3a35eeadf2690d8540f;K8S_POD_UID=760c2ebf-e516-4db6-a500-d2b897cc96de\\\" Path:\\\"\\\" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-hs7h6] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-hs7h6/760c2ebf-e516-4db6-a500-d2b897cc96de]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod marketplace-operator-79b997595-hs7h6 in out of cluster comm: pod \\\"marketplace-operator-79b997595-hs7h6\\\" not found\\n': StdinData: {\\\"binDir\\\":\\\"/var/lib/cni/bin\\\",\\\"clusterNetwork\\\":\\\"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf\\\",\\\"cniVersion\\\":\\\"0.3.1\\\",\\\"daemonSocketDir\\\":\\\"/run/multus/socket\\\",\\\"globalNamespaces\\\":\\\"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv\\\",\\\"logLevel\\\":\\\"verbose\\\",\\\"logToStderr\\\":true,\\\"name\\\":\\\"multus-cni-network\\\",\\\"namespaceIsolation\\\":true,\\\"type\\\":\\\"multus-shim\\\"}\"" pod="openshift-marketplace/marketplace-operator-79b997595-hs7h6" podUID="760c2ebf-e516-4db6-a500-d2b897cc96de" Feb 03 07:15:04 crc kubenswrapper[4708]: E0203 07:15:04.945138 4708 log.go:32] "RunPodSandbox from runtime service failed" err=< Feb 03 07:15:04 crc kubenswrapper[4708]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29501715-pvnpj_openshift-operator-lifecycle-manager_e78f23f0-a7a1-489d-a40f-05d722cc29ec_0(a3b7deb464e0b93b02217ec3b6fc7ef6055adcb6b607ef84da7a142cc8933a9e): error adding pod openshift-operator-lifecycle-manager_collect-profiles-29501715-pvnpj to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"a3b7deb464e0b93b02217ec3b6fc7ef6055adcb6b607ef84da7a142cc8933a9e" Netns:"/var/run/netns/55f05ae3-3ef1-430e-981f-e7894fdb8df9" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-operator-lifecycle-manager;K8S_POD_NAME=collect-profiles-29501715-pvnpj;K8S_POD_INFRA_CONTAINER_ID=a3b7deb464e0b93b02217ec3b6fc7ef6055adcb6b607ef84da7a142cc8933a9e;K8S_POD_UID=e78f23f0-a7a1-489d-a40f-05d722cc29ec" Path:"" ERRORED: error configuring pod [openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj] networking: Multus: [openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj/e78f23f0-a7a1-489d-a40f-05d722cc29ec]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod collect-profiles-29501715-pvnpj in out of cluster comm: pod "collect-profiles-29501715-pvnpj" not found Feb 03 07:15:04 crc kubenswrapper[4708]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Feb 
03 07:15:04 crc kubenswrapper[4708]: > Feb 03 07:15:04 crc kubenswrapper[4708]: E0203 07:15:04.945193 4708 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Feb 03 07:15:04 crc kubenswrapper[4708]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29501715-pvnpj_openshift-operator-lifecycle-manager_e78f23f0-a7a1-489d-a40f-05d722cc29ec_0(a3b7deb464e0b93b02217ec3b6fc7ef6055adcb6b607ef84da7a142cc8933a9e): error adding pod openshift-operator-lifecycle-manager_collect-profiles-29501715-pvnpj to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"a3b7deb464e0b93b02217ec3b6fc7ef6055adcb6b607ef84da7a142cc8933a9e" Netns:"/var/run/netns/55f05ae3-3ef1-430e-981f-e7894fdb8df9" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-operator-lifecycle-manager;K8S_POD_NAME=collect-profiles-29501715-pvnpj;K8S_POD_INFRA_CONTAINER_ID=a3b7deb464e0b93b02217ec3b6fc7ef6055adcb6b607ef84da7a142cc8933a9e;K8S_POD_UID=e78f23f0-a7a1-489d-a40f-05d722cc29ec" Path:"" ERRORED: error configuring pod [openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj] networking: Multus: [openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj/e78f23f0-a7a1-489d-a40f-05d722cc29ec]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod collect-profiles-29501715-pvnpj in out of cluster comm: pod "collect-profiles-29501715-pvnpj" not found Feb 03 07:15:04 crc kubenswrapper[4708]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Feb 03 07:15:04 crc kubenswrapper[4708]: > pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj" Feb 03 07:15:04 crc kubenswrapper[4708]: E0203 07:15:04.945210 4708 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err=< Feb 03 07:15:04 crc kubenswrapper[4708]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29501715-pvnpj_openshift-operator-lifecycle-manager_e78f23f0-a7a1-489d-a40f-05d722cc29ec_0(a3b7deb464e0b93b02217ec3b6fc7ef6055adcb6b607ef84da7a142cc8933a9e): error adding pod openshift-operator-lifecycle-manager_collect-profiles-29501715-pvnpj to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"a3b7deb464e0b93b02217ec3b6fc7ef6055adcb6b607ef84da7a142cc8933a9e" Netns:"/var/run/netns/55f05ae3-3ef1-430e-981f-e7894fdb8df9" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-operator-lifecycle-manager;K8S_POD_NAME=collect-profiles-29501715-pvnpj;K8S_POD_INFRA_CONTAINER_ID=a3b7deb464e0b93b02217ec3b6fc7ef6055adcb6b607ef84da7a142cc8933a9e;K8S_POD_UID=e78f23f0-a7a1-489d-a40f-05d722cc29ec" Path:"" ERRORED: error configuring pod [openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj] networking: Multus: [openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj/e78f23f0-a7a1-489d-a40f-05d722cc29ec]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query 
the pod collect-profiles-29501715-pvnpj in out of cluster comm: pod "collect-profiles-29501715-pvnpj" not found Feb 03 07:15:04 crc kubenswrapper[4708]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Feb 03 07:15:04 crc kubenswrapper[4708]: > pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj" Feb 03 07:15:04 crc kubenswrapper[4708]: E0203 07:15:04.945331 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"collect-profiles-29501715-pvnpj_openshift-operator-lifecycle-manager(e78f23f0-a7a1-489d-a40f-05d722cc29ec)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"collect-profiles-29501715-pvnpj_openshift-operator-lifecycle-manager(e78f23f0-a7a1-489d-a40f-05d722cc29ec)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29501715-pvnpj_openshift-operator-lifecycle-manager_e78f23f0-a7a1-489d-a40f-05d722cc29ec_0(a3b7deb464e0b93b02217ec3b6fc7ef6055adcb6b607ef84da7a142cc8933a9e): error adding pod openshift-operator-lifecycle-manager_collect-profiles-29501715-pvnpj to CNI network \\\"multus-cni-network\\\": plugin type=\\\"multus-shim\\\" name=\\\"multus-cni-network\\\" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:\\\"a3b7deb464e0b93b02217ec3b6fc7ef6055adcb6b607ef84da7a142cc8933a9e\\\" Netns:\\\"/var/run/netns/55f05ae3-3ef1-430e-981f-e7894fdb8df9\\\" IfName:\\\"eth0\\\" Args:\\\"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-operator-lifecycle-manager;K8S_POD_NAME=collect-profiles-29501715-pvnpj;K8S_POD_INFRA_CONTAINER_ID=a3b7deb464e0b93b02217ec3b6fc7ef6055adcb6b607ef84da7a142cc8933a9e;K8S_POD_UID=e78f23f0-a7a1-489d-a40f-05d722cc29ec\\\" Path:\\\"\\\" ERRORED: error configuring pod [openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj] networking: Multus: [openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj/e78f23f0-a7a1-489d-a40f-05d722cc29ec]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod collect-profiles-29501715-pvnpj in out of cluster comm: pod \\\"collect-profiles-29501715-pvnpj\\\" not found\\n': StdinData: {\\\"binDir\\\":\\\"/var/lib/cni/bin\\\",\\\"clusterNetwork\\\":\\\"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf\\\",\\\"cniVersion\\\":\\\"0.3.1\\\",\\\"daemonSocketDir\\\":\\\"/run/multus/socket\\\",\\\"globalNamespaces\\\":\\\"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv\\\",\\\"logLevel\\\":\\\"verbose\\\",\\\"logToStderr\\\":true,\\\"name\\\":\\\"multus-cni-network\\\",\\\"namespaceIsolation\\\":true,\\\"type\\\":\\\"multus-shim\\\"}\"" pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj" podUID="e78f23f0-a7a1-489d-a40f-05d722cc29ec" Feb 03 07:15:04 crc kubenswrapper[4708]: I0203 07:15:04.974154 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Feb 03 07:15:04 crc kubenswrapper[4708]: I0203 07:15:04.995943 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Feb 03 07:15:05 crc 
kubenswrapper[4708]: I0203 07:15:05.008154 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Feb 03 07:15:05 crc kubenswrapper[4708]: I0203 07:15:05.210723 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Feb 03 07:15:05 crc kubenswrapper[4708]: I0203 07:15:05.248219 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Feb 03 07:15:05 crc kubenswrapper[4708]: I0203 07:15:05.269680 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Feb 03 07:15:05 crc kubenswrapper[4708]: I0203 07:15:05.310506 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj" Feb 03 07:15:05 crc kubenswrapper[4708]: I0203 07:15:05.311273 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj" Feb 03 07:15:05 crc kubenswrapper[4708]: I0203 07:15:05.311701 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hs7h6" Feb 03 07:15:05 crc kubenswrapper[4708]: I0203 07:15:05.311984 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hs7h6" Feb 03 07:15:05 crc kubenswrapper[4708]: I0203 07:15:05.365058 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Feb 03 07:15:05 crc kubenswrapper[4708]: I0203 07:15:05.395378 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Feb 03 07:15:05 crc kubenswrapper[4708]: I0203 07:15:05.468716 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Feb 03 07:15:05 crc kubenswrapper[4708]: I0203 07:15:05.480886 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Feb 03 07:15:05 crc kubenswrapper[4708]: I0203 07:15:05.489371 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Feb 03 07:15:05 crc kubenswrapper[4708]: I0203 07:15:05.509165 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Feb 03 07:15:05 crc kubenswrapper[4708]: I0203 07:15:05.521907 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Feb 03 07:15:05 crc kubenswrapper[4708]: I0203 07:15:05.608375 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Feb 03 07:15:05 crc kubenswrapper[4708]: I0203 07:15:05.626226 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Feb 03 07:15:05 crc kubenswrapper[4708]: I0203 07:15:05.686494 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Feb 03 07:15:05 crc kubenswrapper[4708]: I0203 07:15:05.739591 4708 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-image-registry"/"trusted-ca" Feb 03 07:15:05 crc kubenswrapper[4708]: I0203 07:15:05.779122 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Feb 03 07:15:05 crc kubenswrapper[4708]: I0203 07:15:05.832524 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Feb 03 07:15:06 crc kubenswrapper[4708]: I0203 07:15:06.205227 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Feb 03 07:15:06 crc kubenswrapper[4708]: I0203 07:15:06.289257 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Feb 03 07:15:06 crc kubenswrapper[4708]: I0203 07:15:06.362399 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Feb 03 07:15:06 crc kubenswrapper[4708]: I0203 07:15:06.406762 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Feb 03 07:15:06 crc kubenswrapper[4708]: I0203 07:15:06.480612 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Feb 03 07:15:06 crc kubenswrapper[4708]: I0203 07:15:06.627527 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Feb 03 07:15:06 crc kubenswrapper[4708]: I0203 07:15:06.729771 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Feb 03 07:15:06 crc kubenswrapper[4708]: I0203 07:15:06.795371 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Feb 03 07:15:06 crc kubenswrapper[4708]: I0203 07:15:06.807179 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Feb 03 07:15:07 crc kubenswrapper[4708]: I0203 07:15:07.128769 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Feb 03 07:15:07 crc kubenswrapper[4708]: I0203 07:15:07.178663 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Feb 03 07:15:07 crc kubenswrapper[4708]: I0203 07:15:07.249404 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Feb 03 07:15:07 crc kubenswrapper[4708]: I0203 07:15:07.270916 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Feb 03 07:15:07 crc kubenswrapper[4708]: I0203 07:15:07.298624 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Feb 03 07:15:07 crc kubenswrapper[4708]: I0203 07:15:07.517105 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Feb 03 07:15:07 crc kubenswrapper[4708]: I0203 07:15:07.540300 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Feb 03 07:15:07 crc kubenswrapper[4708]: I0203 07:15:07.920264 4708 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Feb 03 07:15:08 crc kubenswrapper[4708]: I0203 07:15:08.018161 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Feb 03 07:15:08 crc kubenswrapper[4708]: I0203 07:15:08.044620 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Feb 03 07:15:08 crc kubenswrapper[4708]: I0203 07:15:08.112884 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Feb 03 07:15:08 crc kubenswrapper[4708]: I0203 07:15:08.130532 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Feb 03 07:15:08 crc kubenswrapper[4708]: E0203 07:15:08.190712 4708 log.go:32] "RunPodSandbox from runtime service failed" err=< Feb 03 07:15:08 crc kubenswrapper[4708]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-hs7h6_openshift-marketplace_760c2ebf-e516-4db6-a500-d2b897cc96de_0(c8c2a157b3279cd2b445f43303b7bfc3df53c6eb04ff080717cc57aaab9ecd5b): error adding pod openshift-marketplace_marketplace-operator-79b997595-hs7h6 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"c8c2a157b3279cd2b445f43303b7bfc3df53c6eb04ff080717cc57aaab9ecd5b" Netns:"/var/run/netns/371d2134-61ec-427e-8825-144e46cceaf2" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-hs7h6;K8S_POD_INFRA_CONTAINER_ID=c8c2a157b3279cd2b445f43303b7bfc3df53c6eb04ff080717cc57aaab9ecd5b;K8S_POD_UID=760c2ebf-e516-4db6-a500-d2b897cc96de" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-hs7h6] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-hs7h6/760c2ebf-e516-4db6-a500-d2b897cc96de]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod marketplace-operator-79b997595-hs7h6 in out of cluster comm: pod "marketplace-operator-79b997595-hs7h6" not found Feb 03 07:15:08 crc kubenswrapper[4708]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Feb 03 07:15:08 crc kubenswrapper[4708]: > Feb 03 07:15:08 crc kubenswrapper[4708]: E0203 07:15:08.190843 4708 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Feb 03 07:15:08 crc kubenswrapper[4708]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-hs7h6_openshift-marketplace_760c2ebf-e516-4db6-a500-d2b897cc96de_0(c8c2a157b3279cd2b445f43303b7bfc3df53c6eb04ff080717cc57aaab9ecd5b): error adding pod openshift-marketplace_marketplace-operator-79b997595-hs7h6 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"c8c2a157b3279cd2b445f43303b7bfc3df53c6eb04ff080717cc57aaab9ecd5b" Netns:"/var/run/netns/371d2134-61ec-427e-8825-144e46cceaf2" 
IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-hs7h6;K8S_POD_INFRA_CONTAINER_ID=c8c2a157b3279cd2b445f43303b7bfc3df53c6eb04ff080717cc57aaab9ecd5b;K8S_POD_UID=760c2ebf-e516-4db6-a500-d2b897cc96de" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-hs7h6] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-hs7h6/760c2ebf-e516-4db6-a500-d2b897cc96de]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod marketplace-operator-79b997595-hs7h6 in out of cluster comm: pod "marketplace-operator-79b997595-hs7h6" not found Feb 03 07:15:08 crc kubenswrapper[4708]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Feb 03 07:15:08 crc kubenswrapper[4708]: > pod="openshift-marketplace/marketplace-operator-79b997595-hs7h6" Feb 03 07:15:08 crc kubenswrapper[4708]: E0203 07:15:08.190873 4708 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err=< Feb 03 07:15:08 crc kubenswrapper[4708]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-hs7h6_openshift-marketplace_760c2ebf-e516-4db6-a500-d2b897cc96de_0(c8c2a157b3279cd2b445f43303b7bfc3df53c6eb04ff080717cc57aaab9ecd5b): error adding pod openshift-marketplace_marketplace-operator-79b997595-hs7h6 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"c8c2a157b3279cd2b445f43303b7bfc3df53c6eb04ff080717cc57aaab9ecd5b" Netns:"/var/run/netns/371d2134-61ec-427e-8825-144e46cceaf2" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-hs7h6;K8S_POD_INFRA_CONTAINER_ID=c8c2a157b3279cd2b445f43303b7bfc3df53c6eb04ff080717cc57aaab9ecd5b;K8S_POD_UID=760c2ebf-e516-4db6-a500-d2b897cc96de" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-hs7h6] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-hs7h6/760c2ebf-e516-4db6-a500-d2b897cc96de]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod marketplace-operator-79b997595-hs7h6 in out of cluster comm: pod "marketplace-operator-79b997595-hs7h6" not found Feb 03 07:15:08 crc kubenswrapper[4708]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Feb 03 07:15:08 crc kubenswrapper[4708]: > pod="openshift-marketplace/marketplace-operator-79b997595-hs7h6" Feb 03 07:15:08 crc kubenswrapper[4708]: E0203 07:15:08.190978 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"marketplace-operator-79b997595-hs7h6_openshift-marketplace(760c2ebf-e516-4db6-a500-d2b897cc96de)\" with 
CreatePodSandboxError: \"Failed to create sandbox for pod \\\"marketplace-operator-79b997595-hs7h6_openshift-marketplace(760c2ebf-e516-4db6-a500-d2b897cc96de)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-hs7h6_openshift-marketplace_760c2ebf-e516-4db6-a500-d2b897cc96de_0(c8c2a157b3279cd2b445f43303b7bfc3df53c6eb04ff080717cc57aaab9ecd5b): error adding pod openshift-marketplace_marketplace-operator-79b997595-hs7h6 to CNI network \\\"multus-cni-network\\\": plugin type=\\\"multus-shim\\\" name=\\\"multus-cni-network\\\" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:\\\"c8c2a157b3279cd2b445f43303b7bfc3df53c6eb04ff080717cc57aaab9ecd5b\\\" Netns:\\\"/var/run/netns/371d2134-61ec-427e-8825-144e46cceaf2\\\" IfName:\\\"eth0\\\" Args:\\\"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-hs7h6;K8S_POD_INFRA_CONTAINER_ID=c8c2a157b3279cd2b445f43303b7bfc3df53c6eb04ff080717cc57aaab9ecd5b;K8S_POD_UID=760c2ebf-e516-4db6-a500-d2b897cc96de\\\" Path:\\\"\\\" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-hs7h6] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-hs7h6/760c2ebf-e516-4db6-a500-d2b897cc96de]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod marketplace-operator-79b997595-hs7h6 in out of cluster comm: pod \\\"marketplace-operator-79b997595-hs7h6\\\" not found\\n': StdinData: {\\\"binDir\\\":\\\"/var/lib/cni/bin\\\",\\\"clusterNetwork\\\":\\\"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf\\\",\\\"cniVersion\\\":\\\"0.3.1\\\",\\\"daemonSocketDir\\\":\\\"/run/multus/socket\\\",\\\"globalNamespaces\\\":\\\"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv\\\",\\\"logLevel\\\":\\\"verbose\\\",\\\"logToStderr\\\":true,\\\"name\\\":\\\"multus-cni-network\\\",\\\"namespaceIsolation\\\":true,\\\"type\\\":\\\"multus-shim\\\"}\"" pod="openshift-marketplace/marketplace-operator-79b997595-hs7h6" podUID="760c2ebf-e516-4db6-a500-d2b897cc96de" Feb 03 07:15:08 crc kubenswrapper[4708]: I0203 07:15:08.272157 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj"] Feb 03 07:15:08 crc kubenswrapper[4708]: I0203 07:15:08.326243 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj" event={"ID":"e78f23f0-a7a1-489d-a40f-05d722cc29ec","Type":"ContainerStarted","Data":"47371d6ca16b70a3dc1f1294582817a69e3a8ea108d2ce7d2419a51be26963c4"} Feb 03 07:15:09 crc kubenswrapper[4708]: I0203 07:15:09.335297 4708 generic.go:334] "Generic (PLEG): container finished" podID="e78f23f0-a7a1-489d-a40f-05d722cc29ec" containerID="5dd526e9bc69e61d7e2dc3071fe7d5c24ce1a4ba3e570e41303812c0614682e6" exitCode=0 Feb 03 07:15:09 crc kubenswrapper[4708]: I0203 07:15:09.335362 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj" event={"ID":"e78f23f0-a7a1-489d-a40f-05d722cc29ec","Type":"ContainerDied","Data":"5dd526e9bc69e61d7e2dc3071fe7d5c24ce1a4ba3e570e41303812c0614682e6"} Feb 03 07:15:09 crc kubenswrapper[4708]: I0203 07:15:09.419491 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 03 07:15:09 crc kubenswrapper[4708]: I0203 07:15:09.933096 4708 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.009877 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.010508 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.098823 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.098897 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.098926 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.098954 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.099004 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.098944 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.099017 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.099042 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.099068 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.099223 4708 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.099234 4708 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.099243 4708 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.099252 4708 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.106181 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.200377 4708 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.342038 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.342088 4708 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="3fe4e4a66cf37e1b0ea95cf2332802ce10391026dab61bfe659a70c05887a321" exitCode=137 Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.342161 4708 scope.go:117] "RemoveContainer" containerID="3fe4e4a66cf37e1b0ea95cf2332802ce10391026dab61bfe659a70c05887a321" Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.342192 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.372695 4708 scope.go:117] "RemoveContainer" containerID="3fe4e4a66cf37e1b0ea95cf2332802ce10391026dab61bfe659a70c05887a321" Feb 03 07:15:10 crc kubenswrapper[4708]: E0203 07:15:10.373257 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3fe4e4a66cf37e1b0ea95cf2332802ce10391026dab61bfe659a70c05887a321\": container with ID starting with 3fe4e4a66cf37e1b0ea95cf2332802ce10391026dab61bfe659a70c05887a321 not found: ID does not exist" containerID="3fe4e4a66cf37e1b0ea95cf2332802ce10391026dab61bfe659a70c05887a321" Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.373300 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3fe4e4a66cf37e1b0ea95cf2332802ce10391026dab61bfe659a70c05887a321"} err="failed to get container status \"3fe4e4a66cf37e1b0ea95cf2332802ce10391026dab61bfe659a70c05887a321\": rpc error: code = NotFound desc = could not find container \"3fe4e4a66cf37e1b0ea95cf2332802ce10391026dab61bfe659a70c05887a321\": container with ID starting with 3fe4e4a66cf37e1b0ea95cf2332802ce10391026dab61bfe659a70c05887a321 not found: ID does not exist" Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.568092 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj" Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.707663 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e78f23f0-a7a1-489d-a40f-05d722cc29ec-config-volume\") pod \"e78f23f0-a7a1-489d-a40f-05d722cc29ec\" (UID: \"e78f23f0-a7a1-489d-a40f-05d722cc29ec\") " Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.708207 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e78f23f0-a7a1-489d-a40f-05d722cc29ec-secret-volume\") pod \"e78f23f0-a7a1-489d-a40f-05d722cc29ec\" (UID: \"e78f23f0-a7a1-489d-a40f-05d722cc29ec\") " Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.708353 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ct72k\" (UniqueName: \"kubernetes.io/projected/e78f23f0-a7a1-489d-a40f-05d722cc29ec-kube-api-access-ct72k\") pod \"e78f23f0-a7a1-489d-a40f-05d722cc29ec\" (UID: \"e78f23f0-a7a1-489d-a40f-05d722cc29ec\") " Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.708279 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e78f23f0-a7a1-489d-a40f-05d722cc29ec-config-volume" (OuterVolumeSpecName: "config-volume") pod "e78f23f0-a7a1-489d-a40f-05d722cc29ec" (UID: "e78f23f0-a7a1-489d-a40f-05d722cc29ec"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.713021 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e78f23f0-a7a1-489d-a40f-05d722cc29ec-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e78f23f0-a7a1-489d-a40f-05d722cc29ec" (UID: "e78f23f0-a7a1-489d-a40f-05d722cc29ec"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.714533 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e78f23f0-a7a1-489d-a40f-05d722cc29ec-kube-api-access-ct72k" (OuterVolumeSpecName: "kube-api-access-ct72k") pod "e78f23f0-a7a1-489d-a40f-05d722cc29ec" (UID: "e78f23f0-a7a1-489d-a40f-05d722cc29ec"). InnerVolumeSpecName "kube-api-access-ct72k". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.809595 4708 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e78f23f0-a7a1-489d-a40f-05d722cc29ec-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.809654 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ct72k\" (UniqueName: \"kubernetes.io/projected/e78f23f0-a7a1-489d-a40f-05d722cc29ec-kube-api-access-ct72k\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:10 crc kubenswrapper[4708]: I0203 07:15:10.809666 4708 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e78f23f0-a7a1-489d-a40f-05d722cc29ec-config-volume\") on node \"crc\" DevicePath \"\"" Feb 03 07:15:11 crc kubenswrapper[4708]: I0203 07:15:11.349341 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj" event={"ID":"e78f23f0-a7a1-489d-a40f-05d722cc29ec","Type":"ContainerDied","Data":"47371d6ca16b70a3dc1f1294582817a69e3a8ea108d2ce7d2419a51be26963c4"} Feb 03 07:15:11 crc kubenswrapper[4708]: I0203 07:15:11.349741 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="47371d6ca16b70a3dc1f1294582817a69e3a8ea108d2ce7d2419a51be26963c4" Feb 03 07:15:11 crc kubenswrapper[4708]: I0203 07:15:11.349955 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501715-pvnpj" Feb 03 07:15:12 crc kubenswrapper[4708]: I0203 07:15:12.107452 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Feb 03 07:15:21 crc kubenswrapper[4708]: I0203 07:15:21.793358 4708 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Feb 03 07:15:23 crc kubenswrapper[4708]: I0203 07:15:23.092000 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hs7h6" Feb 03 07:15:23 crc kubenswrapper[4708]: I0203 07:15:23.092845 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hs7h6"
Feb 03 07:15:23 crc kubenswrapper[4708]: I0203 07:15:23.328371 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hs7h6"]
Feb 03 07:15:23 crc kubenswrapper[4708]: W0203 07:15:23.334758 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod760c2ebf_e516_4db6_a500_d2b897cc96de.slice/crio-8d8a3a06cf886a140b516b6efd5e277cf4dc914506d3d7cdbde201a005d73bb0 WatchSource:0}: Error finding container 8d8a3a06cf886a140b516b6efd5e277cf4dc914506d3d7cdbde201a005d73bb0: Status 404 returned error can't find the container with id 8d8a3a06cf886a140b516b6efd5e277cf4dc914506d3d7cdbde201a005d73bb0
Feb 03 07:15:23 crc kubenswrapper[4708]: I0203 07:15:23.434286 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hs7h6" event={"ID":"760c2ebf-e516-4db6-a500-d2b897cc96de","Type":"ContainerStarted","Data":"8d8a3a06cf886a140b516b6efd5e277cf4dc914506d3d7cdbde201a005d73bb0"}
Feb 03 07:15:24 crc kubenswrapper[4708]: I0203 07:15:24.441817 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hs7h6" event={"ID":"760c2ebf-e516-4db6-a500-d2b897cc96de","Type":"ContainerStarted","Data":"00c619d43f20f1d478e159dc63b1643c1dc84ec507e4197ce162a01ef1d56b35"}
Feb 03 07:15:24 crc kubenswrapper[4708]: I0203 07:15:24.442220 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-hs7h6"
Feb 03 07:15:24 crc kubenswrapper[4708]: I0203 07:15:24.446781 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-hs7h6"
Feb 03 07:15:24 crc kubenswrapper[4708]: I0203 07:15:24.462957 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-hs7h6" podStartSLOduration=29.46293192 podStartE2EDuration="29.46293192s" podCreationTimestamp="2026-02-03 07:14:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:15:24.459236572 +0000 UTC m=+303.441183379" watchObservedRunningTime="2026-02-03 07:15:24.46293192 +0000 UTC m=+303.444878737"
Feb 03 07:15:31 crc kubenswrapper[4708]: I0203 07:15:31.818700 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xpdpr"]
Feb 03 07:15:31 crc kubenswrapper[4708]: I0203 07:15:31.819590 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-xpdpr" podUID="d1edd916-a3bf-4331-abba-d5c8753d4377" containerName="controller-manager" containerID="cri-o://4dc81aa73a83defe1494b090180b53285511a377b2901f51af1aac14b02ca8aa" gracePeriod=30
Feb 03 07:15:31 crc kubenswrapper[4708]: I0203 07:15:31.915303 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-748cf"]
Feb 03 07:15:31 crc kubenswrapper[4708]: I0203 07:15:31.915607 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-748cf" podUID="987304c9-45fa-40ab-a687-528d1e8f69d3" containerName="route-controller-manager" containerID="cri-o://aceb0cc7013d2b30eb0f12195fa4123ea7c605f8068f8f6dd0dcf34d35ff5ec8" gracePeriod=30
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.181324 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-xpdpr"
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.270939 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-748cf"
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.330958 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d1edd916-a3bf-4331-abba-d5c8753d4377-client-ca\") pod \"d1edd916-a3bf-4331-abba-d5c8753d4377\" (UID: \"d1edd916-a3bf-4331-abba-d5c8753d4377\") "
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.331020 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1edd916-a3bf-4331-abba-d5c8753d4377-config\") pod \"d1edd916-a3bf-4331-abba-d5c8753d4377\" (UID: \"d1edd916-a3bf-4331-abba-d5c8753d4377\") "
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.331313 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs8bz\" (UniqueName: \"kubernetes.io/projected/d1edd916-a3bf-4331-abba-d5c8753d4377-kube-api-access-qs8bz\") pod \"d1edd916-a3bf-4331-abba-d5c8753d4377\" (UID: \"d1edd916-a3bf-4331-abba-d5c8753d4377\") "
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.331338 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1edd916-a3bf-4331-abba-d5c8753d4377-serving-cert\") pod \"d1edd916-a3bf-4331-abba-d5c8753d4377\" (UID: \"d1edd916-a3bf-4331-abba-d5c8753d4377\") "
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.331376 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d1edd916-a3bf-4331-abba-d5c8753d4377-proxy-ca-bundles\") pod \"d1edd916-a3bf-4331-abba-d5c8753d4377\" (UID: \"d1edd916-a3bf-4331-abba-d5c8753d4377\") "
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.331455 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1edd916-a3bf-4331-abba-d5c8753d4377-client-ca" (OuterVolumeSpecName: "client-ca") pod "d1edd916-a3bf-4331-abba-d5c8753d4377" (UID: "d1edd916-a3bf-4331-abba-d5c8753d4377"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.331586 4708 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d1edd916-a3bf-4331-abba-d5c8753d4377-client-ca\") on node \"crc\" DevicePath \"\""
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.331601 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1edd916-a3bf-4331-abba-d5c8753d4377-config" (OuterVolumeSpecName: "config") pod "d1edd916-a3bf-4331-abba-d5c8753d4377" (UID: "d1edd916-a3bf-4331-abba-d5c8753d4377"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.331906 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1edd916-a3bf-4331-abba-d5c8753d4377-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "d1edd916-a3bf-4331-abba-d5c8753d4377" (UID: "d1edd916-a3bf-4331-abba-d5c8753d4377"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.336135 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1edd916-a3bf-4331-abba-d5c8753d4377-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "d1edd916-a3bf-4331-abba-d5c8753d4377" (UID: "d1edd916-a3bf-4331-abba-d5c8753d4377"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.336171 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1edd916-a3bf-4331-abba-d5c8753d4377-kube-api-access-qs8bz" (OuterVolumeSpecName: "kube-api-access-qs8bz") pod "d1edd916-a3bf-4331-abba-d5c8753d4377" (UID: "d1edd916-a3bf-4331-abba-d5c8753d4377"). InnerVolumeSpecName "kube-api-access-qs8bz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.432322 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/987304c9-45fa-40ab-a687-528d1e8f69d3-config\") pod \"987304c9-45fa-40ab-a687-528d1e8f69d3\" (UID: \"987304c9-45fa-40ab-a687-528d1e8f69d3\") "
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.432403 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4xzjb\" (UniqueName: \"kubernetes.io/projected/987304c9-45fa-40ab-a687-528d1e8f69d3-kube-api-access-4xzjb\") pod \"987304c9-45fa-40ab-a687-528d1e8f69d3\" (UID: \"987304c9-45fa-40ab-a687-528d1e8f69d3\") "
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.432433 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/987304c9-45fa-40ab-a687-528d1e8f69d3-serving-cert\") pod \"987304c9-45fa-40ab-a687-528d1e8f69d3\" (UID: \"987304c9-45fa-40ab-a687-528d1e8f69d3\") "
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.432491 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/987304c9-45fa-40ab-a687-528d1e8f69d3-client-ca\") pod \"987304c9-45fa-40ab-a687-528d1e8f69d3\" (UID: \"987304c9-45fa-40ab-a687-528d1e8f69d3\") "
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.432723 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs8bz\" (UniqueName: \"kubernetes.io/projected/d1edd916-a3bf-4331-abba-d5c8753d4377-kube-api-access-qs8bz\") on node \"crc\" DevicePath \"\""
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.432740 4708 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1edd916-a3bf-4331-abba-d5c8753d4377-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.432752 4708 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d1edd916-a3bf-4331-abba-d5c8753d4377-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.432763 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1edd916-a3bf-4331-abba-d5c8753d4377-config\") on node \"crc\" DevicePath \"\""
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.433289 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/987304c9-45fa-40ab-a687-528d1e8f69d3-client-ca" (OuterVolumeSpecName: "client-ca") pod "987304c9-45fa-40ab-a687-528d1e8f69d3" (UID: "987304c9-45fa-40ab-a687-528d1e8f69d3"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.433323 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/987304c9-45fa-40ab-a687-528d1e8f69d3-config" (OuterVolumeSpecName: "config") pod "987304c9-45fa-40ab-a687-528d1e8f69d3" (UID: "987304c9-45fa-40ab-a687-528d1e8f69d3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.436343 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/987304c9-45fa-40ab-a687-528d1e8f69d3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "987304c9-45fa-40ab-a687-528d1e8f69d3" (UID: "987304c9-45fa-40ab-a687-528d1e8f69d3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.436978 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/987304c9-45fa-40ab-a687-528d1e8f69d3-kube-api-access-4xzjb" (OuterVolumeSpecName: "kube-api-access-4xzjb") pod "987304c9-45fa-40ab-a687-528d1e8f69d3" (UID: "987304c9-45fa-40ab-a687-528d1e8f69d3"). InnerVolumeSpecName "kube-api-access-4xzjb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.491338 4708 generic.go:334] "Generic (PLEG): container finished" podID="d1edd916-a3bf-4331-abba-d5c8753d4377" containerID="4dc81aa73a83defe1494b090180b53285511a377b2901f51af1aac14b02ca8aa" exitCode=0
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.491401 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-xpdpr"
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.491421 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-xpdpr" event={"ID":"d1edd916-a3bf-4331-abba-d5c8753d4377","Type":"ContainerDied","Data":"4dc81aa73a83defe1494b090180b53285511a377b2901f51af1aac14b02ca8aa"}
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.491453 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-xpdpr" event={"ID":"d1edd916-a3bf-4331-abba-d5c8753d4377","Type":"ContainerDied","Data":"49a55fc13bcb2a8944212ccb5cf9a50ce1790e5b307de5ff5933e629d8559486"}
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.491471 4708 scope.go:117] "RemoveContainer" containerID="4dc81aa73a83defe1494b090180b53285511a377b2901f51af1aac14b02ca8aa"
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.494517 4708 generic.go:334] "Generic (PLEG): container finished" podID="987304c9-45fa-40ab-a687-528d1e8f69d3" containerID="aceb0cc7013d2b30eb0f12195fa4123ea7c605f8068f8f6dd0dcf34d35ff5ec8" exitCode=0
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.494553 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-748cf" event={"ID":"987304c9-45fa-40ab-a687-528d1e8f69d3","Type":"ContainerDied","Data":"aceb0cc7013d2b30eb0f12195fa4123ea7c605f8068f8f6dd0dcf34d35ff5ec8"}
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.494577 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-748cf" event={"ID":"987304c9-45fa-40ab-a687-528d1e8f69d3","Type":"ContainerDied","Data":"4157c3ec9b21115051822dc58c5100e509523f86163695168100bbcfdb6b6e8d"}
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.494738 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-748cf"
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.513668 4708 scope.go:117] "RemoveContainer" containerID="4dc81aa73a83defe1494b090180b53285511a377b2901f51af1aac14b02ca8aa"
Feb 03 07:15:32 crc kubenswrapper[4708]: E0203 07:15:32.514153 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4dc81aa73a83defe1494b090180b53285511a377b2901f51af1aac14b02ca8aa\": container with ID starting with 4dc81aa73a83defe1494b090180b53285511a377b2901f51af1aac14b02ca8aa not found: ID does not exist" containerID="4dc81aa73a83defe1494b090180b53285511a377b2901f51af1aac14b02ca8aa"
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.514272 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4dc81aa73a83defe1494b090180b53285511a377b2901f51af1aac14b02ca8aa"} err="failed to get container status \"4dc81aa73a83defe1494b090180b53285511a377b2901f51af1aac14b02ca8aa\": rpc error: code = NotFound desc = could not find container \"4dc81aa73a83defe1494b090180b53285511a377b2901f51af1aac14b02ca8aa\": container with ID starting with 4dc81aa73a83defe1494b090180b53285511a377b2901f51af1aac14b02ca8aa not found: ID does not exist"
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.514359 4708 scope.go:117] "RemoveContainer" containerID="aceb0cc7013d2b30eb0f12195fa4123ea7c605f8068f8f6dd0dcf34d35ff5ec8"
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.524532 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-748cf"]
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.528966 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-748cf"]
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.532791 4708 scope.go:117] "RemoveContainer" containerID="aceb0cc7013d2b30eb0f12195fa4123ea7c605f8068f8f6dd0dcf34d35ff5ec8"
Feb 03 07:15:32 crc kubenswrapper[4708]: E0203 07:15:32.533425 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aceb0cc7013d2b30eb0f12195fa4123ea7c605f8068f8f6dd0dcf34d35ff5ec8\": container with ID starting with aceb0cc7013d2b30eb0f12195fa4123ea7c605f8068f8f6dd0dcf34d35ff5ec8 not found: ID does not exist" containerID="aceb0cc7013d2b30eb0f12195fa4123ea7c605f8068f8f6dd0dcf34d35ff5ec8"
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.533544 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aceb0cc7013d2b30eb0f12195fa4123ea7c605f8068f8f6dd0dcf34d35ff5ec8"} err="failed to get container status \"aceb0cc7013d2b30eb0f12195fa4123ea7c605f8068f8f6dd0dcf34d35ff5ec8\": rpc error: code = NotFound desc = could not find container \"aceb0cc7013d2b30eb0f12195fa4123ea7c605f8068f8f6dd0dcf34d35ff5ec8\": container with ID starting with aceb0cc7013d2b30eb0f12195fa4123ea7c605f8068f8f6dd0dcf34d35ff5ec8 not found: ID does not exist"
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.533757 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/987304c9-45fa-40ab-a687-528d1e8f69d3-config\") on node \"crc\" DevicePath \"\""
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.533859 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4xzjb\" (UniqueName: \"kubernetes.io/projected/987304c9-45fa-40ab-a687-528d1e8f69d3-kube-api-access-4xzjb\") on node \"crc\" DevicePath \"\""
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.533883 4708 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/987304c9-45fa-40ab-a687-528d1e8f69d3-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.533900 4708 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/987304c9-45fa-40ab-a687-528d1e8f69d3-client-ca\") on node \"crc\" DevicePath \"\""
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.539778 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xpdpr"]
Feb 03 07:15:32 crc kubenswrapper[4708]: I0203 07:15:32.542462 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xpdpr"]
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.753982 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6b846ddf9c-28ldb"]
Feb 03 07:15:33 crc kubenswrapper[4708]: E0203 07:15:33.754984 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1edd916-a3bf-4331-abba-d5c8753d4377" containerName="controller-manager"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.755061 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1edd916-a3bf-4331-abba-d5c8753d4377" containerName="controller-manager"
Feb 03 07:15:33 crc kubenswrapper[4708]: E0203 07:15:33.755119 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22892077-113b-4859-81cb-9ec0e6fc60ea" containerName="extract-utilities"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.755429 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="22892077-113b-4859-81cb-9ec0e6fc60ea" containerName="extract-utilities"
Feb 03 07:15:33 crc kubenswrapper[4708]: E0203 07:15:33.755507 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.755562 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Feb 03 07:15:33 crc kubenswrapper[4708]: E0203 07:15:33.755617 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="267fbe93-1af6-4a87-9720-c9d5cae93c91" containerName="registry-server"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.755672 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="267fbe93-1af6-4a87-9720-c9d5cae93c91" containerName="registry-server"
Feb 03 07:15:33 crc kubenswrapper[4708]: E0203 07:15:33.755734 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70bd64d0-0ea0-4c56-9e7f-fc150343c834" containerName="extract-content"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.755808 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="70bd64d0-0ea0-4c56-9e7f-fc150343c834" containerName="extract-content"
Feb 03 07:15:33 crc kubenswrapper[4708]: E0203 07:15:33.755879 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="267fbe93-1af6-4a87-9720-c9d5cae93c91" containerName="extract-content"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.755935 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="267fbe93-1af6-4a87-9720-c9d5cae93c91" containerName="extract-content"
Feb 03 07:15:33 crc kubenswrapper[4708]: E0203 07:15:33.755994 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70bd64d0-0ea0-4c56-9e7f-fc150343c834" containerName="extract-utilities"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.756054 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="70bd64d0-0ea0-4c56-9e7f-fc150343c834" containerName="extract-utilities"
Feb 03 07:15:33 crc kubenswrapper[4708]: E0203 07:15:33.756114 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42939d12-477f-4186-9d74-1b62ca36d039" containerName="extract-utilities"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.756184 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="42939d12-477f-4186-9d74-1b62ca36d039" containerName="extract-utilities"
Feb 03 07:15:33 crc kubenswrapper[4708]: E0203 07:15:33.756245 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22892077-113b-4859-81cb-9ec0e6fc60ea" containerName="extract-content"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.756305 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="22892077-113b-4859-81cb-9ec0e6fc60ea" containerName="extract-content"
Feb 03 07:15:33 crc kubenswrapper[4708]: E0203 07:15:33.756362 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22892077-113b-4859-81cb-9ec0e6fc60ea" containerName="registry-server"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.756421 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="22892077-113b-4859-81cb-9ec0e6fc60ea" containerName="registry-server"
Feb 03 07:15:33 crc kubenswrapper[4708]: E0203 07:15:33.756482 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70bd64d0-0ea0-4c56-9e7f-fc150343c834" containerName="registry-server"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.756559 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="70bd64d0-0ea0-4c56-9e7f-fc150343c834" containerName="registry-server"
Feb 03 07:15:33 crc kubenswrapper[4708]: E0203 07:15:33.756648 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="267fbe93-1af6-4a87-9720-c9d5cae93c91" containerName="extract-utilities"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.756719 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="267fbe93-1af6-4a87-9720-c9d5cae93c91" containerName="extract-utilities"
Feb 03 07:15:33 crc kubenswrapper[4708]: E0203 07:15:33.756816 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="987304c9-45fa-40ab-a687-528d1e8f69d3" containerName="route-controller-manager"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.756899 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="987304c9-45fa-40ab-a687-528d1e8f69d3" containerName="route-controller-manager"
Feb 03 07:15:33 crc kubenswrapper[4708]: E0203 07:15:33.756982 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42939d12-477f-4186-9d74-1b62ca36d039" containerName="extract-content"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.757050 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="42939d12-477f-4186-9d74-1b62ca36d039" containerName="extract-content"
Feb 03 07:15:33 crc kubenswrapper[4708]: E0203 07:15:33.757123 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00c9d661-6c2e-48e7-9747-1476d52290a8" containerName="marketplace-operator"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.757191 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="00c9d661-6c2e-48e7-9747-1476d52290a8" containerName="marketplace-operator"
Feb 03 07:15:33 crc kubenswrapper[4708]: E0203 07:15:33.757261 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e78f23f0-a7a1-489d-a40f-05d722cc29ec" containerName="collect-profiles"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.757341 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="e78f23f0-a7a1-489d-a40f-05d722cc29ec" containerName="collect-profiles"
Feb 03 07:15:33 crc kubenswrapper[4708]: E0203 07:15:33.757411 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42939d12-477f-4186-9d74-1b62ca36d039" containerName="registry-server"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.757479 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="42939d12-477f-4186-9d74-1b62ca36d039" containerName="registry-server"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.757665 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="70bd64d0-0ea0-4c56-9e7f-fc150343c834" containerName="registry-server"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.757747 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="987304c9-45fa-40ab-a687-528d1e8f69d3" containerName="route-controller-manager"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.757847 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="22892077-113b-4859-81cb-9ec0e6fc60ea" containerName="registry-server"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.757943 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1edd916-a3bf-4331-abba-d5c8753d4377" containerName="controller-manager"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.758018 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="00c9d661-6c2e-48e7-9747-1476d52290a8" containerName="marketplace-operator"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.758086 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="267fbe93-1af6-4a87-9720-c9d5cae93c91" containerName="registry-server"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.758169 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.758258 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="42939d12-477f-4186-9d74-1b62ca36d039" containerName="registry-server"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.758335 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="e78f23f0-a7a1-489d-a40f-05d722cc29ec" containerName="collect-profiles"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.758842 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7c7d8c84dd-hsc2g"]
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.759018 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6b846ddf9c-28ldb"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.760196 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7c7d8c84dd-hsc2g"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.765063 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.765383 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6b846ddf9c-28ldb"]
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.766835 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.767058 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.766838 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.767370 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.768096 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.768466 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.768732 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.768877 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.768757 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7c7d8c84dd-hsc2g"]
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.769005 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.769256 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.773973 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.776107 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.950403 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7b9316b-a726-4f25-8e9a-0291eef22685-config\") pod \"route-controller-manager-6b846ddf9c-28ldb\" (UID: \"e7b9316b-a726-4f25-8e9a-0291eef22685\") " pod="openshift-route-controller-manager/route-controller-manager-6b846ddf9c-28ldb"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.950472 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/65207bf3-4449-44d2-9c98-ea5e16ac8d9b-client-ca\") pod \"controller-manager-7c7d8c84dd-hsc2g\" (UID: \"65207bf3-4449-44d2-9c98-ea5e16ac8d9b\") " pod="openshift-controller-manager/controller-manager-7c7d8c84dd-hsc2g"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.950712 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/65207bf3-4449-44d2-9c98-ea5e16ac8d9b-proxy-ca-bundles\") pod \"controller-manager-7c7d8c84dd-hsc2g\" (UID: \"65207bf3-4449-44d2-9c98-ea5e16ac8d9b\") " pod="openshift-controller-manager/controller-manager-7c7d8c84dd-hsc2g"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.950932 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e7b9316b-a726-4f25-8e9a-0291eef22685-client-ca\") pod \"route-controller-manager-6b846ddf9c-28ldb\" (UID: \"e7b9316b-a726-4f25-8e9a-0291eef22685\") " pod="openshift-route-controller-manager/route-controller-manager-6b846ddf9c-28ldb"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.950987 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skvn7\" (UniqueName: \"kubernetes.io/projected/e7b9316b-a726-4f25-8e9a-0291eef22685-kube-api-access-skvn7\") pod \"route-controller-manager-6b846ddf9c-28ldb\" (UID: \"e7b9316b-a726-4f25-8e9a-0291eef22685\") " pod="openshift-route-controller-manager/route-controller-manager-6b846ddf9c-28ldb"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.951103 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gmcmv\" (UniqueName: \"kubernetes.io/projected/65207bf3-4449-44d2-9c98-ea5e16ac8d9b-kube-api-access-gmcmv\") pod \"controller-manager-7c7d8c84dd-hsc2g\" (UID: \"65207bf3-4449-44d2-9c98-ea5e16ac8d9b\") " pod="openshift-controller-manager/controller-manager-7c7d8c84dd-hsc2g"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.951128 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7b9316b-a726-4f25-8e9a-0291eef22685-serving-cert\") pod \"route-controller-manager-6b846ddf9c-28ldb\" (UID: \"e7b9316b-a726-4f25-8e9a-0291eef22685\") " pod="openshift-route-controller-manager/route-controller-manager-6b846ddf9c-28ldb"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.951156 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65207bf3-4449-44d2-9c98-ea5e16ac8d9b-config\") pod \"controller-manager-7c7d8c84dd-hsc2g\" (UID: \"65207bf3-4449-44d2-9c98-ea5e16ac8d9b\") " pod="openshift-controller-manager/controller-manager-7c7d8c84dd-hsc2g"
Feb 03 07:15:33 crc kubenswrapper[4708]: I0203 07:15:33.951176 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65207bf3-4449-44d2-9c98-ea5e16ac8d9b-serving-cert\") pod \"controller-manager-7c7d8c84dd-hsc2g\" (UID: \"65207bf3-4449-44d2-9c98-ea5e16ac8d9b\") " pod="openshift-controller-manager/controller-manager-7c7d8c84dd-hsc2g"
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.051955 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skvn7\" (UniqueName: \"kubernetes.io/projected/e7b9316b-a726-4f25-8e9a-0291eef22685-kube-api-access-skvn7\") pod \"route-controller-manager-6b846ddf9c-28ldb\" (UID: \"e7b9316b-a726-4f25-8e9a-0291eef22685\") " pod="openshift-route-controller-manager/route-controller-manager-6b846ddf9c-28ldb"
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.052063 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gmcmv\" (UniqueName: \"kubernetes.io/projected/65207bf3-4449-44d2-9c98-ea5e16ac8d9b-kube-api-access-gmcmv\") pod \"controller-manager-7c7d8c84dd-hsc2g\" (UID: \"65207bf3-4449-44d2-9c98-ea5e16ac8d9b\") " pod="openshift-controller-manager/controller-manager-7c7d8c84dd-hsc2g"
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.052102 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7b9316b-a726-4f25-8e9a-0291eef22685-serving-cert\") pod \"route-controller-manager-6b846ddf9c-28ldb\" (UID: \"e7b9316b-a726-4f25-8e9a-0291eef22685\") " pod="openshift-route-controller-manager/route-controller-manager-6b846ddf9c-28ldb"
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.052158 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65207bf3-4449-44d2-9c98-ea5e16ac8d9b-config\") pod \"controller-manager-7c7d8c84dd-hsc2g\" (UID: \"65207bf3-4449-44d2-9c98-ea5e16ac8d9b\") " pod="openshift-controller-manager/controller-manager-7c7d8c84dd-hsc2g"
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.052203 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65207bf3-4449-44d2-9c98-ea5e16ac8d9b-serving-cert\") pod \"controller-manager-7c7d8c84dd-hsc2g\" (UID: \"65207bf3-4449-44d2-9c98-ea5e16ac8d9b\") " pod="openshift-controller-manager/controller-manager-7c7d8c84dd-hsc2g"
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.052858 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7b9316b-a726-4f25-8e9a-0291eef22685-config\") pod \"route-controller-manager-6b846ddf9c-28ldb\" (UID: \"e7b9316b-a726-4f25-8e9a-0291eef22685\") " pod="openshift-route-controller-manager/route-controller-manager-6b846ddf9c-28ldb"
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.052941 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/65207bf3-4449-44d2-9c98-ea5e16ac8d9b-proxy-ca-bundles\") pod \"controller-manager-7c7d8c84dd-hsc2g\" (UID: \"65207bf3-4449-44d2-9c98-ea5e16ac8d9b\") " pod="openshift-controller-manager/controller-manager-7c7d8c84dd-hsc2g"
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.052974 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/65207bf3-4449-44d2-9c98-ea5e16ac8d9b-client-ca\") pod \"controller-manager-7c7d8c84dd-hsc2g\" (UID: \"65207bf3-4449-44d2-9c98-ea5e16ac8d9b\") " pod="openshift-controller-manager/controller-manager-7c7d8c84dd-hsc2g"
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.053025 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e7b9316b-a726-4f25-8e9a-0291eef22685-client-ca\") pod \"route-controller-manager-6b846ddf9c-28ldb\" (UID: \"e7b9316b-a726-4f25-8e9a-0291eef22685\") " pod="openshift-route-controller-manager/route-controller-manager-6b846ddf9c-28ldb"
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.054455 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65207bf3-4449-44d2-9c98-ea5e16ac8d9b-config\") pod \"controller-manager-7c7d8c84dd-hsc2g\" (UID: \"65207bf3-4449-44d2-9c98-ea5e16ac8d9b\") " pod="openshift-controller-manager/controller-manager-7c7d8c84dd-hsc2g"
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.055336 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/65207bf3-4449-44d2-9c98-ea5e16ac8d9b-proxy-ca-bundles\") pod \"controller-manager-7c7d8c84dd-hsc2g\" (UID: \"65207bf3-4449-44d2-9c98-ea5e16ac8d9b\") " pod="openshift-controller-manager/controller-manager-7c7d8c84dd-hsc2g"
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.055539 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7b9316b-a726-4f25-8e9a-0291eef22685-config\") pod \"route-controller-manager-6b846ddf9c-28ldb\" (UID: \"e7b9316b-a726-4f25-8e9a-0291eef22685\") " pod="openshift-route-controller-manager/route-controller-manager-6b846ddf9c-28ldb"
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.055924 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/65207bf3-4449-44d2-9c98-ea5e16ac8d9b-client-ca\") pod \"controller-manager-7c7d8c84dd-hsc2g\" (UID: \"65207bf3-4449-44d2-9c98-ea5e16ac8d9b\") " pod="openshift-controller-manager/controller-manager-7c7d8c84dd-hsc2g"
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.056171 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e7b9316b-a726-4f25-8e9a-0291eef22685-client-ca\") pod \"route-controller-manager-6b846ddf9c-28ldb\" (UID: \"e7b9316b-a726-4f25-8e9a-0291eef22685\") " pod="openshift-route-controller-manager/route-controller-manager-6b846ddf9c-28ldb"
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.058089 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65207bf3-4449-44d2-9c98-ea5e16ac8d9b-serving-cert\") pod \"controller-manager-7c7d8c84dd-hsc2g\" (UID: \"65207bf3-4449-44d2-9c98-ea5e16ac8d9b\") " pod="openshift-controller-manager/controller-manager-7c7d8c84dd-hsc2g"
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.060095 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7b9316b-a726-4f25-8e9a-0291eef22685-serving-cert\") pod \"route-controller-manager-6b846ddf9c-28ldb\" (UID: \"e7b9316b-a726-4f25-8e9a-0291eef22685\") " pod="openshift-route-controller-manager/route-controller-manager-6b846ddf9c-28ldb"
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.070143 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gmcmv\" (UniqueName: \"kubernetes.io/projected/65207bf3-4449-44d2-9c98-ea5e16ac8d9b-kube-api-access-gmcmv\") pod \"controller-manager-7c7d8c84dd-hsc2g\" (UID: \"65207bf3-4449-44d2-9c98-ea5e16ac8d9b\") " pod="openshift-controller-manager/controller-manager-7c7d8c84dd-hsc2g"
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.073162 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skvn7\" (UniqueName: \"kubernetes.io/projected/e7b9316b-a726-4f25-8e9a-0291eef22685-kube-api-access-skvn7\") pod \"route-controller-manager-6b846ddf9c-28ldb\" (UID: \"e7b9316b-a726-4f25-8e9a-0291eef22685\") " pod="openshift-route-controller-manager/route-controller-manager-6b846ddf9c-28ldb"
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.076414 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6b846ddf9c-28ldb"
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.092236 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7c7d8c84dd-hsc2g"
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.099434 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="987304c9-45fa-40ab-a687-528d1e8f69d3" path="/var/lib/kubelet/pods/987304c9-45fa-40ab-a687-528d1e8f69d3/volumes"
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.100319 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1edd916-a3bf-4331-abba-d5c8753d4377" path="/var/lib/kubelet/pods/d1edd916-a3bf-4331-abba-d5c8753d4377/volumes"
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.277244 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6b846ddf9c-28ldb"]
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.315226 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7c7d8c84dd-hsc2g"]
Feb 03 07:15:34 crc kubenswrapper[4708]: W0203 07:15:34.321023 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod65207bf3_4449_44d2_9c98_ea5e16ac8d9b.slice/crio-c91634a699bb36769e5f95b4bbf692c0b0ccad70b6c87edb7ec11dd0ff5dd749 WatchSource:0}: Error finding container c91634a699bb36769e5f95b4bbf692c0b0ccad70b6c87edb7ec11dd0ff5dd749: Status 404 returned error can't find the container with id c91634a699bb36769e5f95b4bbf692c0b0ccad70b6c87edb7ec11dd0ff5dd749
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.509984 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6b846ddf9c-28ldb" event={"ID":"e7b9316b-a726-4f25-8e9a-0291eef22685","Type":"ContainerStarted","Data":"b3bcbee63614b24f3b39ad3c85977ee207da74a927048449ba7a1a5dff859803"}
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.510043 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6b846ddf9c-28ldb" event={"ID":"e7b9316b-a726-4f25-8e9a-0291eef22685","Type":"ContainerStarted","Data":"1b694578801fe1654bddf89587b3ac31a870ff5ea31d9151dd377682e8e57d46"}
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.511206 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6b846ddf9c-28ldb"
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.514333 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7c7d8c84dd-hsc2g" event={"ID":"65207bf3-4449-44d2-9c98-ea5e16ac8d9b","Type":"ContainerStarted","Data":"629b959dddd249a25c0d703f082a2d039c5fc8c703e54982f2c0696af70f8cf7"}
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.514367 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7c7d8c84dd-hsc2g" event={"ID":"65207bf3-4449-44d2-9c98-ea5e16ac8d9b","Type":"ContainerStarted","Data":"c91634a699bb36769e5f95b4bbf692c0b0ccad70b6c87edb7ec11dd0ff5dd749"}
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.515184 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7c7d8c84dd-hsc2g"
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.519998 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7c7d8c84dd-hsc2g"
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.529015 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6b846ddf9c-28ldb" podStartSLOduration=2.5289880399999998 podStartE2EDuration="2.52898804s" podCreationTimestamp="2026-02-03 07:15:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:15:34.52649825 +0000 UTC m=+313.508445127" watchObservedRunningTime="2026-02-03 07:15:34.52898804 +0000 UTC m=+313.510934857"
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.551420 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7c7d8c84dd-hsc2g" podStartSLOduration=3.551397345 podStartE2EDuration="3.551397345s" podCreationTimestamp="2026-02-03 07:15:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:15:34.54781236 +0000 UTC m=+313.529759187" watchObservedRunningTime="2026-02-03 07:15:34.551397345 +0000 UTC m=+313.533344152"
Feb 03 07:15:34 crc kubenswrapper[4708]: I0203 07:15:34.705271 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6b846ddf9c-28ldb"
Feb 03 07:16:23 crc kubenswrapper[4708]: I0203 07:16:23.833686 4708 patch_prober.go:28] interesting pod/machine-config-daemon-r94bn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 03 07:16:23 crc kubenswrapper[4708]: I0203 07:16:23.834525 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 03 07:16:26 crc kubenswrapper[4708]: I0203 07:16:26.625633 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-kvmsf"]
Feb 03 07:16:26 crc kubenswrapper[4708]: I0203 07:16:26.626771 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-kvmsf"
Feb 03 07:16:26 crc kubenswrapper[4708]: I0203 07:16:26.644421 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-kvmsf"]
Feb 03 07:16:26 crc kubenswrapper[4708]: I0203 07:16:26.813849 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ksl44\" (UniqueName: \"kubernetes.io/projected/0528dab0-a486-44bd-8f7c-a07306e15952-kube-api-access-ksl44\") pod \"image-registry-66df7c8f76-kvmsf\" (UID: \"0528dab0-a486-44bd-8f7c-a07306e15952\") " pod="openshift-image-registry/image-registry-66df7c8f76-kvmsf"
Feb 03 07:16:26 crc kubenswrapper[4708]: I0203 07:16:26.813895 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/0528dab0-a486-44bd-8f7c-a07306e15952-installation-pull-secrets\") pod \"image-registry-66df7c8f76-kvmsf\" (UID: \"0528dab0-a486-44bd-8f7c-a07306e15952\") " pod="openshift-image-registry/image-registry-66df7c8f76-kvmsf"
Feb 03 07:16:26 crc kubenswrapper[4708]: I0203 07:16:26.813923 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0528dab0-a486-44bd-8f7c-a07306e15952-bound-sa-token\") pod \"image-registry-66df7c8f76-kvmsf\" (UID: \"0528dab0-a486-44bd-8f7c-a07306e15952\") " pod="openshift-image-registry/image-registry-66df7c8f76-kvmsf"
Feb 03 07:16:26 crc kubenswrapper[4708]: I0203 07:16:26.813993 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/0528dab0-a486-44bd-8f7c-a07306e15952-registry-tls\") pod \"image-registry-66df7c8f76-kvmsf\" (UID: \"0528dab0-a486-44bd-8f7c-a07306e15952\") " pod="openshift-image-registry/image-registry-66df7c8f76-kvmsf"
Feb 03 07:16:26 crc kubenswrapper[4708]: I0203 07:16:26.814030 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0528dab0-a486-44bd-8f7c-a07306e15952-trusted-ca\") pod \"image-registry-66df7c8f76-kvmsf\" (UID: \"0528dab0-a486-44bd-8f7c-a07306e15952\") " pod="openshift-image-registry/image-registry-66df7c8f76-kvmsf"
Feb 03 07:16:26 crc kubenswrapper[4708]: I0203 07:16:26.814076 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/0528dab0-a486-44bd-8f7c-a07306e15952-registry-certificates\") pod \"image-registry-66df7c8f76-kvmsf\" (UID: \"0528dab0-a486-44bd-8f7c-a07306e15952\") " pod="openshift-image-registry/image-registry-66df7c8f76-kvmsf"
Feb 03 07:16:26 crc kubenswrapper[4708]: I0203 07:16:26.814193 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/0528dab0-a486-44bd-8f7c-a07306e15952-ca-trust-extracted\") pod \"image-registry-66df7c8f76-kvmsf\" (UID: \"0528dab0-a486-44bd-8f7c-a07306e15952\") " pod="openshift-image-registry/image-registry-66df7c8f76-kvmsf"
Feb 03 07:16:26 crc kubenswrapper[4708]: I0203 07:16:26.814291 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-kvmsf\" (UID: \"0528dab0-a486-44bd-8f7c-a07306e15952\") " pod="openshift-image-registry/image-registry-66df7c8f76-kvmsf"
Feb 03 07:16:26 crc kubenswrapper[4708]: I0203 07:16:26.846570 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-kvmsf\" (UID: \"0528dab0-a486-44bd-8f7c-a07306e15952\") " pod="openshift-image-registry/image-registry-66df7c8f76-kvmsf"
Feb 03 07:16:26 crc kubenswrapper[4708]: I0203 07:16:26.916052 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/0528dab0-a486-44bd-8f7c-a07306e15952-registry-certificates\") pod \"image-registry-66df7c8f76-kvmsf\" (UID: \"0528dab0-a486-44bd-8f7c-a07306e15952\") " pod="openshift-image-registry/image-registry-66df7c8f76-kvmsf"
Feb 03 07:16:26 crc kubenswrapper[4708]: I0203 07:16:26.916120 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/0528dab0-a486-44bd-8f7c-a07306e15952-ca-trust-extracted\") pod \"image-registry-66df7c8f76-kvmsf\" (UID: \"0528dab0-a486-44bd-8f7c-a07306e15952\") " pod="openshift-image-registry/image-registry-66df7c8f76-kvmsf"
Feb 03 07:16:26 crc kubenswrapper[4708]: I0203 07:16:26.916167 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ksl44\" (UniqueName: \"kubernetes.io/projected/0528dab0-a486-44bd-8f7c-a07306e15952-kube-api-access-ksl44\") pod \"image-registry-66df7c8f76-kvmsf\" (UID: \"0528dab0-a486-44bd-8f7c-a07306e15952\") " pod="openshift-image-registry/image-registry-66df7c8f76-kvmsf"
Feb 03 07:16:26 crc kubenswrapper[4708]: I0203 07:16:26.916193 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/0528dab0-a486-44bd-8f7c-a07306e15952-installation-pull-secrets\") pod \"image-registry-66df7c8f76-kvmsf\" (UID: \"0528dab0-a486-44bd-8f7c-a07306e15952\") " pod="openshift-image-registry/image-registry-66df7c8f76-kvmsf"
Feb 03 07:16:26 crc kubenswrapper[4708]: I0203 07:16:26.916224 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0528dab0-a486-44bd-8f7c-a07306e15952-bound-sa-token\") pod \"image-registry-66df7c8f76-kvmsf\" (UID: \"0528dab0-a486-44bd-8f7c-a07306e15952\") " pod="openshift-image-registry/image-registry-66df7c8f76-kvmsf"
Feb 03 07:16:26 crc kubenswrapper[4708]: I0203 07:16:26.916249 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/0528dab0-a486-44bd-8f7c-a07306e15952-registry-tls\") pod \"image-registry-66df7c8f76-kvmsf\" (UID: \"0528dab0-a486-44bd-8f7c-a07306e15952\") " pod="openshift-image-registry/image-registry-66df7c8f76-kvmsf"
Feb 03 07:16:26 crc kubenswrapper[4708]: I0203 07:16:26.916297 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0528dab0-a486-44bd-8f7c-a07306e15952-trusted-ca\") pod \"image-registry-66df7c8f76-kvmsf\" (UID: \"0528dab0-a486-44bd-8f7c-a07306e15952\") " pod="openshift-image-registry/image-registry-66df7c8f76-kvmsf"
Feb 03 07:16:26 crc kubenswrapper[4708]: I0203 07:16:26.916879 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/0528dab0-a486-44bd-8f7c-a07306e15952-ca-trust-extracted\") pod \"image-registry-66df7c8f76-kvmsf\" (UID: \"0528dab0-a486-44bd-8f7c-a07306e15952\") " pod="openshift-image-registry/image-registry-66df7c8f76-kvmsf"
Feb 03 07:16:26 crc kubenswrapper[4708]: I0203 07:16:26.917533 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/0528dab0-a486-44bd-8f7c-a07306e15952-registry-certificates\") pod \"image-registry-66df7c8f76-kvmsf\" (UID: \"0528dab0-a486-44bd-8f7c-a07306e15952\") " pod="openshift-image-registry/image-registry-66df7c8f76-kvmsf"
Feb 03 07:16:26 crc kubenswrapper[4708]: I0203 07:16:26.918256 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0528dab0-a486-44bd-8f7c-a07306e15952-trusted-ca\") pod \"image-registry-66df7c8f76-kvmsf\" (UID: \"0528dab0-a486-44bd-8f7c-a07306e15952\") " pod="openshift-image-registry/image-registry-66df7c8f76-kvmsf"
Feb 03 07:16:26 crc kubenswrapper[4708]: I0203 07:16:26.922784 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/0528dab0-a486-44bd-8f7c-a07306e15952-registry-tls\") pod \"image-registry-66df7c8f76-kvmsf\" (UID: \"0528dab0-a486-44bd-8f7c-a07306e15952\") " pod="openshift-image-registry/image-registry-66df7c8f76-kvmsf"
Feb 03 07:16:26 crc kubenswrapper[4708]: I0203 07:16:26.925493 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/0528dab0-a486-44bd-8f7c-a07306e15952-installation-pull-secrets\") pod \"image-registry-66df7c8f76-kvmsf\" (UID: \"0528dab0-a486-44bd-8f7c-a07306e15952\") " pod="openshift-image-registry/image-registry-66df7c8f76-kvmsf"
Feb 03 07:16:26 crc kubenswrapper[4708]: I0203 07:16:26.934040 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0528dab0-a486-44bd-8f7c-a07306e15952-bound-sa-token\") pod \"image-registry-66df7c8f76-kvmsf\" (UID: \"0528dab0-a486-44bd-8f7c-a07306e15952\") " pod="openshift-image-registry/image-registry-66df7c8f76-kvmsf"
Feb 03 07:16:26 crc kubenswrapper[4708]: I0203 07:16:26.935080 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ksl44\" (UniqueName: \"kubernetes.io/projected/0528dab0-a486-44bd-8f7c-a07306e15952-kube-api-access-ksl44\") pod \"image-registry-66df7c8f76-kvmsf\" (UID: \"0528dab0-a486-44bd-8f7c-a07306e15952\") " pod="openshift-image-registry/image-registry-66df7c8f76-kvmsf"
Feb 03 07:16:26 crc kubenswrapper[4708]: I0203 07:16:26.942112 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-kvmsf"
Feb 03 07:16:27 crc kubenswrapper[4708]: I0203 07:16:27.335450 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-kvmsf"]
Feb 03 07:16:27 crc kubenswrapper[4708]: I0203 07:16:27.815464 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-kvmsf" event={"ID":"0528dab0-a486-44bd-8f7c-a07306e15952","Type":"ContainerStarted","Data":"a5d9f29ce434ffa2418f36b25aa4c3f1cb719bb66766b14764ccaf132b6832d9"}
Feb 03 07:16:27 crc kubenswrapper[4708]: I0203 07:16:27.815509 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-kvmsf" event={"ID":"0528dab0-a486-44bd-8f7c-a07306e15952","Type":"ContainerStarted","Data":"46d28e1885468b1b4055eb69a3d87bc667d2b7d216b2d00fcbc02127d093be40"}
Feb 03 07:16:27 crc kubenswrapper[4708]: I0203 07:16:27.815662 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-kvmsf"
Feb 03 07:16:27 crc kubenswrapper[4708]: I0203 07:16:27.840025 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-kvmsf" podStartSLOduration=1.8400066000000002 podStartE2EDuration="1.8400066s" podCreationTimestamp="2026-02-03 07:16:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:16:27.83528342 +0000 UTC m=+366.817230247" watchObservedRunningTime="2026-02-03 07:16:27.8400066 +0000 UTC m=+366.821953417"
Feb 03 07:16:31 crc kubenswrapper[4708]: I0203 07:16:31.822468 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6b846ddf9c-28ldb"]
Feb 03 07:16:31 crc kubenswrapper[4708]: I0203 07:16:31.823043 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6b846ddf9c-28ldb" podUID="e7b9316b-a726-4f25-8e9a-0291eef22685" containerName="route-controller-manager" containerID="cri-o://b3bcbee63614b24f3b39ad3c85977ee207da74a927048449ba7a1a5dff859803" gracePeriod=30
Feb 03 07:16:32 crc kubenswrapper[4708]: I0203 07:16:32.221060 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6b846ddf9c-28ldb"
Feb 03 07:16:32 crc kubenswrapper[4708]: I0203 07:16:32.391970 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7b9316b-a726-4f25-8e9a-0291eef22685-config\") pod \"e7b9316b-a726-4f25-8e9a-0291eef22685\" (UID: \"e7b9316b-a726-4f25-8e9a-0291eef22685\") "
Feb 03 07:16:32 crc kubenswrapper[4708]: I0203 07:16:32.392046 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e7b9316b-a726-4f25-8e9a-0291eef22685-client-ca\") pod \"e7b9316b-a726-4f25-8e9a-0291eef22685\" (UID: \"e7b9316b-a726-4f25-8e9a-0291eef22685\") "
Feb 03 07:16:32 crc kubenswrapper[4708]: I0203 07:16:32.392104 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-skvn7\" (UniqueName: \"kubernetes.io/projected/e7b9316b-a726-4f25-8e9a-0291eef22685-kube-api-access-skvn7\") pod \"e7b9316b-a726-4f25-8e9a-0291eef22685\" (UID: \"e7b9316b-a726-4f25-8e9a-0291eef22685\") "
Feb 03 07:16:32 crc kubenswrapper[4708]: I0203 07:16:32.392160 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7b9316b-a726-4f25-8e9a-0291eef22685-serving-cert\") pod \"e7b9316b-a726-4f25-8e9a-0291eef22685\" (UID: \"e7b9316b-a726-4f25-8e9a-0291eef22685\") "
Feb 03 07:16:32 crc kubenswrapper[4708]: I0203 07:16:32.392730 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7b9316b-a726-4f25-8e9a-0291eef22685-client-ca" (OuterVolumeSpecName: "client-ca") pod "e7b9316b-a726-4f25-8e9a-0291eef22685" (UID: "e7b9316b-a726-4f25-8e9a-0291eef22685"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 03 07:16:32 crc kubenswrapper[4708]: I0203 07:16:32.393011 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7b9316b-a726-4f25-8e9a-0291eef22685-config" (OuterVolumeSpecName: "config") pod "e7b9316b-a726-4f25-8e9a-0291eef22685" (UID: "e7b9316b-a726-4f25-8e9a-0291eef22685"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 03 07:16:32 crc kubenswrapper[4708]: I0203 07:16:32.396877 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7b9316b-a726-4f25-8e9a-0291eef22685-kube-api-access-skvn7" (OuterVolumeSpecName: "kube-api-access-skvn7") pod "e7b9316b-a726-4f25-8e9a-0291eef22685" (UID: "e7b9316b-a726-4f25-8e9a-0291eef22685"). InnerVolumeSpecName "kube-api-access-skvn7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 07:16:32 crc kubenswrapper[4708]: I0203 07:16:32.397891 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7b9316b-a726-4f25-8e9a-0291eef22685-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7b9316b-a726-4f25-8e9a-0291eef22685" (UID: "e7b9316b-a726-4f25-8e9a-0291eef22685"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:16:32 crc kubenswrapper[4708]: I0203 07:16:32.493133 4708 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7b9316b-a726-4f25-8e9a-0291eef22685-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 03 07:16:32 crc kubenswrapper[4708]: I0203 07:16:32.493175 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7b9316b-a726-4f25-8e9a-0291eef22685-config\") on node \"crc\" DevicePath \"\""
Feb 03 07:16:32 crc kubenswrapper[4708]: I0203 07:16:32.493186 4708 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e7b9316b-a726-4f25-8e9a-0291eef22685-client-ca\") on node \"crc\" DevicePath \"\""
Feb 03 07:16:32 crc kubenswrapper[4708]: I0203 07:16:32.493197 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-skvn7\" (UniqueName: \"kubernetes.io/projected/e7b9316b-a726-4f25-8e9a-0291eef22685-kube-api-access-skvn7\") on node \"crc\" DevicePath \"\""
Feb 03 07:16:32 crc kubenswrapper[4708]: I0203 07:16:32.846919 4708 generic.go:334] "Generic (PLEG): container finished" podID="e7b9316b-a726-4f25-8e9a-0291eef22685" containerID="b3bcbee63614b24f3b39ad3c85977ee207da74a927048449ba7a1a5dff859803" exitCode=0
Feb 03 07:16:32 crc kubenswrapper[4708]: I0203 07:16:32.846988 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6b846ddf9c-28ldb" event={"ID":"e7b9316b-a726-4f25-8e9a-0291eef22685","Type":"ContainerDied","Data":"b3bcbee63614b24f3b39ad3c85977ee207da74a927048449ba7a1a5dff859803"}
Feb 03 07:16:32 crc kubenswrapper[4708]: I0203 07:16:32.847031 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6b846ddf9c-28ldb" event={"ID":"e7b9316b-a726-4f25-8e9a-0291eef22685","Type":"ContainerDied","Data":"1b694578801fe1654bddf89587b3ac31a870ff5ea31d9151dd377682e8e57d46"}
Feb 03 07:16:32 crc kubenswrapper[4708]: I0203 07:16:32.847052 4708 scope.go:117] "RemoveContainer" containerID="b3bcbee63614b24f3b39ad3c85977ee207da74a927048449ba7a1a5dff859803"
Feb 03 07:16:32 crc kubenswrapper[4708]: I0203 07:16:32.847975 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6b846ddf9c-28ldb"
Feb 03 07:16:32 crc kubenswrapper[4708]: I0203 07:16:32.875884 4708 scope.go:117] "RemoveContainer" containerID="b3bcbee63614b24f3b39ad3c85977ee207da74a927048449ba7a1a5dff859803"
Feb 03 07:16:32 crc kubenswrapper[4708]: E0203 07:16:32.880242 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3bcbee63614b24f3b39ad3c85977ee207da74a927048449ba7a1a5dff859803\": container with ID starting with b3bcbee63614b24f3b39ad3c85977ee207da74a927048449ba7a1a5dff859803 not found: ID does not exist" containerID="b3bcbee63614b24f3b39ad3c85977ee207da74a927048449ba7a1a5dff859803"
Feb 03 07:16:32 crc kubenswrapper[4708]: I0203 07:16:32.881947 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3bcbee63614b24f3b39ad3c85977ee207da74a927048449ba7a1a5dff859803"} err="failed to get container status \"b3bcbee63614b24f3b39ad3c85977ee207da74a927048449ba7a1a5dff859803\": rpc error: code = NotFound desc = could not find container \"b3bcbee63614b24f3b39ad3c85977ee207da74a927048449ba7a1a5dff859803\": container with ID starting with b3bcbee63614b24f3b39ad3c85977ee207da74a927048449ba7a1a5dff859803 not found: ID does not exist"
Feb 03 07:16:32 crc kubenswrapper[4708]: I0203 07:16:32.884388 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6b846ddf9c-28ldb"]
Feb 03 07:16:32 crc kubenswrapper[4708]: I0203 07:16:32.889668 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6b846ddf9c-28ldb"]
Feb 03 07:16:33 crc kubenswrapper[4708]: I0203 07:16:33.793831 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-68fb5497b4-7z5z5"]
Feb 03 07:16:33 crc kubenswrapper[4708]: E0203 07:16:33.794194 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7b9316b-a726-4f25-8e9a-0291eef22685" containerName="route-controller-manager"
Feb 03 07:16:33 crc kubenswrapper[4708]: I0203 07:16:33.794216 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7b9316b-a726-4f25-8e9a-0291eef22685" containerName="route-controller-manager"
Feb 03 07:16:33 crc kubenswrapper[4708]: I0203 07:16:33.794439 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7b9316b-a726-4f25-8e9a-0291eef22685" containerName="route-controller-manager"
Feb 03 07:16:33 crc kubenswrapper[4708]: I0203 07:16:33.795405 4708 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-68fb5497b4-7z5z5" Feb 03 07:16:33 crc kubenswrapper[4708]: I0203 07:16:33.804709 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 03 07:16:33 crc kubenswrapper[4708]: I0203 07:16:33.804767 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 03 07:16:33 crc kubenswrapper[4708]: I0203 07:16:33.804867 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 03 07:16:33 crc kubenswrapper[4708]: I0203 07:16:33.804992 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 03 07:16:33 crc kubenswrapper[4708]: I0203 07:16:33.805843 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 03 07:16:33 crc kubenswrapper[4708]: I0203 07:16:33.809055 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-68fb5497b4-7z5z5"] Feb 03 07:16:33 crc kubenswrapper[4708]: I0203 07:16:33.809345 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 03 07:16:33 crc kubenswrapper[4708]: I0203 07:16:33.922018 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eb78844e-5ca9-49d8-9a13-5b50e851001d-serving-cert\") pod \"route-controller-manager-68fb5497b4-7z5z5\" (UID: \"eb78844e-5ca9-49d8-9a13-5b50e851001d\") " pod="openshift-route-controller-manager/route-controller-manager-68fb5497b4-7z5z5" Feb 03 07:16:33 crc kubenswrapper[4708]: I0203 07:16:33.922091 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khfr9\" (UniqueName: \"kubernetes.io/projected/eb78844e-5ca9-49d8-9a13-5b50e851001d-kube-api-access-khfr9\") pod \"route-controller-manager-68fb5497b4-7z5z5\" (UID: \"eb78844e-5ca9-49d8-9a13-5b50e851001d\") " pod="openshift-route-controller-manager/route-controller-manager-68fb5497b4-7z5z5" Feb 03 07:16:33 crc kubenswrapper[4708]: I0203 07:16:33.922354 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/eb78844e-5ca9-49d8-9a13-5b50e851001d-client-ca\") pod \"route-controller-manager-68fb5497b4-7z5z5\" (UID: \"eb78844e-5ca9-49d8-9a13-5b50e851001d\") " pod="openshift-route-controller-manager/route-controller-manager-68fb5497b4-7z5z5" Feb 03 07:16:33 crc kubenswrapper[4708]: I0203 07:16:33.922561 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb78844e-5ca9-49d8-9a13-5b50e851001d-config\") pod \"route-controller-manager-68fb5497b4-7z5z5\" (UID: \"eb78844e-5ca9-49d8-9a13-5b50e851001d\") " pod="openshift-route-controller-manager/route-controller-manager-68fb5497b4-7z5z5" Feb 03 07:16:34 crc kubenswrapper[4708]: I0203 07:16:34.023417 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/eb78844e-5ca9-49d8-9a13-5b50e851001d-client-ca\") pod 
\"route-controller-manager-68fb5497b4-7z5z5\" (UID: \"eb78844e-5ca9-49d8-9a13-5b50e851001d\") " pod="openshift-route-controller-manager/route-controller-manager-68fb5497b4-7z5z5" Feb 03 07:16:34 crc kubenswrapper[4708]: I0203 07:16:34.023477 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb78844e-5ca9-49d8-9a13-5b50e851001d-config\") pod \"route-controller-manager-68fb5497b4-7z5z5\" (UID: \"eb78844e-5ca9-49d8-9a13-5b50e851001d\") " pod="openshift-route-controller-manager/route-controller-manager-68fb5497b4-7z5z5" Feb 03 07:16:34 crc kubenswrapper[4708]: I0203 07:16:34.023507 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eb78844e-5ca9-49d8-9a13-5b50e851001d-serving-cert\") pod \"route-controller-manager-68fb5497b4-7z5z5\" (UID: \"eb78844e-5ca9-49d8-9a13-5b50e851001d\") " pod="openshift-route-controller-manager/route-controller-manager-68fb5497b4-7z5z5" Feb 03 07:16:34 crc kubenswrapper[4708]: I0203 07:16:34.023526 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khfr9\" (UniqueName: \"kubernetes.io/projected/eb78844e-5ca9-49d8-9a13-5b50e851001d-kube-api-access-khfr9\") pod \"route-controller-manager-68fb5497b4-7z5z5\" (UID: \"eb78844e-5ca9-49d8-9a13-5b50e851001d\") " pod="openshift-route-controller-manager/route-controller-manager-68fb5497b4-7z5z5" Feb 03 07:16:34 crc kubenswrapper[4708]: I0203 07:16:34.024683 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/eb78844e-5ca9-49d8-9a13-5b50e851001d-client-ca\") pod \"route-controller-manager-68fb5497b4-7z5z5\" (UID: \"eb78844e-5ca9-49d8-9a13-5b50e851001d\") " pod="openshift-route-controller-manager/route-controller-manager-68fb5497b4-7z5z5" Feb 03 07:16:34 crc kubenswrapper[4708]: I0203 07:16:34.026614 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb78844e-5ca9-49d8-9a13-5b50e851001d-config\") pod \"route-controller-manager-68fb5497b4-7z5z5\" (UID: \"eb78844e-5ca9-49d8-9a13-5b50e851001d\") " pod="openshift-route-controller-manager/route-controller-manager-68fb5497b4-7z5z5" Feb 03 07:16:34 crc kubenswrapper[4708]: I0203 07:16:34.028590 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eb78844e-5ca9-49d8-9a13-5b50e851001d-serving-cert\") pod \"route-controller-manager-68fb5497b4-7z5z5\" (UID: \"eb78844e-5ca9-49d8-9a13-5b50e851001d\") " pod="openshift-route-controller-manager/route-controller-manager-68fb5497b4-7z5z5" Feb 03 07:16:34 crc kubenswrapper[4708]: I0203 07:16:34.040088 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-khfr9\" (UniqueName: \"kubernetes.io/projected/eb78844e-5ca9-49d8-9a13-5b50e851001d-kube-api-access-khfr9\") pod \"route-controller-manager-68fb5497b4-7z5z5\" (UID: \"eb78844e-5ca9-49d8-9a13-5b50e851001d\") " pod="openshift-route-controller-manager/route-controller-manager-68fb5497b4-7z5z5" Feb 03 07:16:34 crc kubenswrapper[4708]: I0203 07:16:34.100725 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7b9316b-a726-4f25-8e9a-0291eef22685" path="/var/lib/kubelet/pods/e7b9316b-a726-4f25-8e9a-0291eef22685/volumes" Feb 03 07:16:34 crc kubenswrapper[4708]: I0203 07:16:34.119517 4708 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-68fb5497b4-7z5z5" Feb 03 07:16:34 crc kubenswrapper[4708]: I0203 07:16:34.504086 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-68fb5497b4-7z5z5"] Feb 03 07:16:34 crc kubenswrapper[4708]: I0203 07:16:34.859691 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-68fb5497b4-7z5z5" event={"ID":"eb78844e-5ca9-49d8-9a13-5b50e851001d","Type":"ContainerStarted","Data":"d722eb77952fa088f9cc4c18da55c4b8c0cde99978d1b65b262b660b8a5f3d21"} Feb 03 07:16:34 crc kubenswrapper[4708]: I0203 07:16:34.860205 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-68fb5497b4-7z5z5" Feb 03 07:16:34 crc kubenswrapper[4708]: I0203 07:16:34.860284 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-68fb5497b4-7z5z5" event={"ID":"eb78844e-5ca9-49d8-9a13-5b50e851001d","Type":"ContainerStarted","Data":"4395bfcb44c32360cb1ca1200b80c182040bbdc3c640f9c22e86dd6bfd807328"} Feb 03 07:16:34 crc kubenswrapper[4708]: I0203 07:16:34.876021 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-68fb5497b4-7z5z5" podStartSLOduration=3.875996271 podStartE2EDuration="3.875996271s" podCreationTimestamp="2026-02-03 07:16:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:16:34.873849961 +0000 UTC m=+373.855796788" watchObservedRunningTime="2026-02-03 07:16:34.875996271 +0000 UTC m=+373.857943098" Feb 03 07:16:35 crc kubenswrapper[4708]: I0203 07:16:35.165433 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-68fb5497b4-7z5z5" Feb 03 07:16:43 crc kubenswrapper[4708]: I0203 07:16:43.496116 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zkk2w"] Feb 03 07:16:43 crc kubenswrapper[4708]: I0203 07:16:43.501357 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zkk2w" Feb 03 07:16:43 crc kubenswrapper[4708]: I0203 07:16:43.504485 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Feb 03 07:16:43 crc kubenswrapper[4708]: I0203 07:16:43.514765 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zkk2w"] Feb 03 07:16:43 crc kubenswrapper[4708]: I0203 07:16:43.659679 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f881ea5f-3c53-4524-8999-6ecbfaf5dfef-catalog-content\") pod \"community-operators-zkk2w\" (UID: \"f881ea5f-3c53-4524-8999-6ecbfaf5dfef\") " pod="openshift-marketplace/community-operators-zkk2w" Feb 03 07:16:43 crc kubenswrapper[4708]: I0203 07:16:43.659781 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f881ea5f-3c53-4524-8999-6ecbfaf5dfef-utilities\") pod \"community-operators-zkk2w\" (UID: \"f881ea5f-3c53-4524-8999-6ecbfaf5dfef\") " pod="openshift-marketplace/community-operators-zkk2w" Feb 03 07:16:43 crc kubenswrapper[4708]: I0203 07:16:43.659835 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czrgz\" (UniqueName: \"kubernetes.io/projected/f881ea5f-3c53-4524-8999-6ecbfaf5dfef-kube-api-access-czrgz\") pod \"community-operators-zkk2w\" (UID: \"f881ea5f-3c53-4524-8999-6ecbfaf5dfef\") " pod="openshift-marketplace/community-operators-zkk2w" Feb 03 07:16:43 crc kubenswrapper[4708]: I0203 07:16:43.693872 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rpf9t"] Feb 03 07:16:43 crc kubenswrapper[4708]: I0203 07:16:43.703262 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rpf9t"] Feb 03 07:16:43 crc kubenswrapper[4708]: I0203 07:16:43.703461 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rpf9t" Feb 03 07:16:43 crc kubenswrapper[4708]: I0203 07:16:43.706155 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Feb 03 07:16:43 crc kubenswrapper[4708]: I0203 07:16:43.761614 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f881ea5f-3c53-4524-8999-6ecbfaf5dfef-catalog-content\") pod \"community-operators-zkk2w\" (UID: \"f881ea5f-3c53-4524-8999-6ecbfaf5dfef\") " pod="openshift-marketplace/community-operators-zkk2w" Feb 03 07:16:43 crc kubenswrapper[4708]: I0203 07:16:43.761763 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f881ea5f-3c53-4524-8999-6ecbfaf5dfef-utilities\") pod \"community-operators-zkk2w\" (UID: \"f881ea5f-3c53-4524-8999-6ecbfaf5dfef\") " pod="openshift-marketplace/community-operators-zkk2w" Feb 03 07:16:43 crc kubenswrapper[4708]: I0203 07:16:43.761890 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czrgz\" (UniqueName: \"kubernetes.io/projected/f881ea5f-3c53-4524-8999-6ecbfaf5dfef-kube-api-access-czrgz\") pod \"community-operators-zkk2w\" (UID: \"f881ea5f-3c53-4524-8999-6ecbfaf5dfef\") " pod="openshift-marketplace/community-operators-zkk2w" Feb 03 07:16:43 crc kubenswrapper[4708]: I0203 07:16:43.763885 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f881ea5f-3c53-4524-8999-6ecbfaf5dfef-utilities\") pod \"community-operators-zkk2w\" (UID: \"f881ea5f-3c53-4524-8999-6ecbfaf5dfef\") " pod="openshift-marketplace/community-operators-zkk2w" Feb 03 07:16:43 crc kubenswrapper[4708]: I0203 07:16:43.767145 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f881ea5f-3c53-4524-8999-6ecbfaf5dfef-catalog-content\") pod \"community-operators-zkk2w\" (UID: \"f881ea5f-3c53-4524-8999-6ecbfaf5dfef\") " pod="openshift-marketplace/community-operators-zkk2w" Feb 03 07:16:43 crc kubenswrapper[4708]: I0203 07:16:43.794150 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czrgz\" (UniqueName: \"kubernetes.io/projected/f881ea5f-3c53-4524-8999-6ecbfaf5dfef-kube-api-access-czrgz\") pod \"community-operators-zkk2w\" (UID: \"f881ea5f-3c53-4524-8999-6ecbfaf5dfef\") " pod="openshift-marketplace/community-operators-zkk2w" Feb 03 07:16:43 crc kubenswrapper[4708]: I0203 07:16:43.821135 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zkk2w" Feb 03 07:16:43 crc kubenswrapper[4708]: I0203 07:16:43.863073 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgbkm\" (UniqueName: \"kubernetes.io/projected/26447fe3-af8b-43e8-8aa8-e2e29f5639c1-kube-api-access-hgbkm\") pod \"redhat-operators-rpf9t\" (UID: \"26447fe3-af8b-43e8-8aa8-e2e29f5639c1\") " pod="openshift-marketplace/redhat-operators-rpf9t" Feb 03 07:16:43 crc kubenswrapper[4708]: I0203 07:16:43.863141 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/26447fe3-af8b-43e8-8aa8-e2e29f5639c1-utilities\") pod \"redhat-operators-rpf9t\" (UID: \"26447fe3-af8b-43e8-8aa8-e2e29f5639c1\") " pod="openshift-marketplace/redhat-operators-rpf9t" Feb 03 07:16:43 crc kubenswrapper[4708]: I0203 07:16:43.863159 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/26447fe3-af8b-43e8-8aa8-e2e29f5639c1-catalog-content\") pod \"redhat-operators-rpf9t\" (UID: \"26447fe3-af8b-43e8-8aa8-e2e29f5639c1\") " pod="openshift-marketplace/redhat-operators-rpf9t" Feb 03 07:16:43 crc kubenswrapper[4708]: I0203 07:16:43.964547 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgbkm\" (UniqueName: \"kubernetes.io/projected/26447fe3-af8b-43e8-8aa8-e2e29f5639c1-kube-api-access-hgbkm\") pod \"redhat-operators-rpf9t\" (UID: \"26447fe3-af8b-43e8-8aa8-e2e29f5639c1\") " pod="openshift-marketplace/redhat-operators-rpf9t" Feb 03 07:16:43 crc kubenswrapper[4708]: I0203 07:16:43.964602 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/26447fe3-af8b-43e8-8aa8-e2e29f5639c1-utilities\") pod \"redhat-operators-rpf9t\" (UID: \"26447fe3-af8b-43e8-8aa8-e2e29f5639c1\") " pod="openshift-marketplace/redhat-operators-rpf9t" Feb 03 07:16:43 crc kubenswrapper[4708]: I0203 07:16:43.964624 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/26447fe3-af8b-43e8-8aa8-e2e29f5639c1-catalog-content\") pod \"redhat-operators-rpf9t\" (UID: \"26447fe3-af8b-43e8-8aa8-e2e29f5639c1\") " pod="openshift-marketplace/redhat-operators-rpf9t" Feb 03 07:16:43 crc kubenswrapper[4708]: I0203 07:16:43.965178 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/26447fe3-af8b-43e8-8aa8-e2e29f5639c1-catalog-content\") pod \"redhat-operators-rpf9t\" (UID: \"26447fe3-af8b-43e8-8aa8-e2e29f5639c1\") " pod="openshift-marketplace/redhat-operators-rpf9t" Feb 03 07:16:43 crc kubenswrapper[4708]: I0203 07:16:43.965215 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/26447fe3-af8b-43e8-8aa8-e2e29f5639c1-utilities\") pod \"redhat-operators-rpf9t\" (UID: \"26447fe3-af8b-43e8-8aa8-e2e29f5639c1\") " pod="openshift-marketplace/redhat-operators-rpf9t" Feb 03 07:16:43 crc kubenswrapper[4708]: I0203 07:16:43.982936 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgbkm\" (UniqueName: \"kubernetes.io/projected/26447fe3-af8b-43e8-8aa8-e2e29f5639c1-kube-api-access-hgbkm\") pod \"redhat-operators-rpf9t\" (UID: 
\"26447fe3-af8b-43e8-8aa8-e2e29f5639c1\") " pod="openshift-marketplace/redhat-operators-rpf9t" Feb 03 07:16:44 crc kubenswrapper[4708]: I0203 07:16:44.019885 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rpf9t" Feb 03 07:16:44 crc kubenswrapper[4708]: W0203 07:16:44.058618 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf881ea5f_3c53_4524_8999_6ecbfaf5dfef.slice/crio-02aefdc5266935f6e1a75ad92e0bf9e6de3350c8b2198b771bf207431eebb08d WatchSource:0}: Error finding container 02aefdc5266935f6e1a75ad92e0bf9e6de3350c8b2198b771bf207431eebb08d: Status 404 returned error can't find the container with id 02aefdc5266935f6e1a75ad92e0bf9e6de3350c8b2198b771bf207431eebb08d Feb 03 07:16:44 crc kubenswrapper[4708]: I0203 07:16:44.066421 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zkk2w"] Feb 03 07:16:44 crc kubenswrapper[4708]: I0203 07:16:44.421939 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rpf9t"] Feb 03 07:16:44 crc kubenswrapper[4708]: W0203 07:16:44.429038 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod26447fe3_af8b_43e8_8aa8_e2e29f5639c1.slice/crio-5f31d2abea806a2904f8869eca871510a37bba471150fa835abcde3dd31bd56f WatchSource:0}: Error finding container 5f31d2abea806a2904f8869eca871510a37bba471150fa835abcde3dd31bd56f: Status 404 returned error can't find the container with id 5f31d2abea806a2904f8869eca871510a37bba471150fa835abcde3dd31bd56f Feb 03 07:16:44 crc kubenswrapper[4708]: I0203 07:16:44.911100 4708 generic.go:334] "Generic (PLEG): container finished" podID="26447fe3-af8b-43e8-8aa8-e2e29f5639c1" containerID="6790fa9411b174c936bc32208fddb4e7e39339e13c405bac7c97349e87668b75" exitCode=0 Feb 03 07:16:44 crc kubenswrapper[4708]: I0203 07:16:44.911238 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rpf9t" event={"ID":"26447fe3-af8b-43e8-8aa8-e2e29f5639c1","Type":"ContainerDied","Data":"6790fa9411b174c936bc32208fddb4e7e39339e13c405bac7c97349e87668b75"} Feb 03 07:16:44 crc kubenswrapper[4708]: I0203 07:16:44.911478 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rpf9t" event={"ID":"26447fe3-af8b-43e8-8aa8-e2e29f5639c1","Type":"ContainerStarted","Data":"5f31d2abea806a2904f8869eca871510a37bba471150fa835abcde3dd31bd56f"} Feb 03 07:16:44 crc kubenswrapper[4708]: I0203 07:16:44.913411 4708 generic.go:334] "Generic (PLEG): container finished" podID="f881ea5f-3c53-4524-8999-6ecbfaf5dfef" containerID="c859e19557c71530dee56f57d839a2105dd004d11d2f7ce121a9eaaccffe19f7" exitCode=0 Feb 03 07:16:44 crc kubenswrapper[4708]: I0203 07:16:44.913478 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zkk2w" event={"ID":"f881ea5f-3c53-4524-8999-6ecbfaf5dfef","Type":"ContainerDied","Data":"c859e19557c71530dee56f57d839a2105dd004d11d2f7ce121a9eaaccffe19f7"} Feb 03 07:16:44 crc kubenswrapper[4708]: I0203 07:16:44.913508 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zkk2w" event={"ID":"f881ea5f-3c53-4524-8999-6ecbfaf5dfef","Type":"ContainerStarted","Data":"02aefdc5266935f6e1a75ad92e0bf9e6de3350c8b2198b771bf207431eebb08d"} Feb 03 07:16:45 crc kubenswrapper[4708]: I0203 
07:16:45.929275 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zkk2w" event={"ID":"f881ea5f-3c53-4524-8999-6ecbfaf5dfef","Type":"ContainerStarted","Data":"33f0aebf05dfde2b8e3b64d69d4c95935124e6dfac257d1447c4b27d0d99d1b4"} Feb 03 07:16:45 crc kubenswrapper[4708]: I0203 07:16:45.933340 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rpf9t" event={"ID":"26447fe3-af8b-43e8-8aa8-e2e29f5639c1","Type":"ContainerStarted","Data":"b72b587476baafd41abdcaeb84409d58683d6b42e3a74870bb77c22a82bb98c4"} Feb 03 07:16:46 crc kubenswrapper[4708]: I0203 07:16:46.083125 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9glc4"] Feb 03 07:16:46 crc kubenswrapper[4708]: I0203 07:16:46.084301 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9glc4" Feb 03 07:16:46 crc kubenswrapper[4708]: I0203 07:16:46.086840 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Feb 03 07:16:46 crc kubenswrapper[4708]: I0203 07:16:46.112902 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9glc4"] Feb 03 07:16:46 crc kubenswrapper[4708]: I0203 07:16:46.196534 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4sf68\" (UniqueName: \"kubernetes.io/projected/849de51a-5755-4905-8627-1cc76e9e4647-kube-api-access-4sf68\") pod \"certified-operators-9glc4\" (UID: \"849de51a-5755-4905-8627-1cc76e9e4647\") " pod="openshift-marketplace/certified-operators-9glc4" Feb 03 07:16:46 crc kubenswrapper[4708]: I0203 07:16:46.196568 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/849de51a-5755-4905-8627-1cc76e9e4647-catalog-content\") pod \"certified-operators-9glc4\" (UID: \"849de51a-5755-4905-8627-1cc76e9e4647\") " pod="openshift-marketplace/certified-operators-9glc4" Feb 03 07:16:46 crc kubenswrapper[4708]: I0203 07:16:46.196651 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/849de51a-5755-4905-8627-1cc76e9e4647-utilities\") pod \"certified-operators-9glc4\" (UID: \"849de51a-5755-4905-8627-1cc76e9e4647\") " pod="openshift-marketplace/certified-operators-9glc4" Feb 03 07:16:46 crc kubenswrapper[4708]: I0203 07:16:46.298207 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/849de51a-5755-4905-8627-1cc76e9e4647-utilities\") pod \"certified-operators-9glc4\" (UID: \"849de51a-5755-4905-8627-1cc76e9e4647\") " pod="openshift-marketplace/certified-operators-9glc4" Feb 03 07:16:46 crc kubenswrapper[4708]: I0203 07:16:46.298317 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4sf68\" (UniqueName: \"kubernetes.io/projected/849de51a-5755-4905-8627-1cc76e9e4647-kube-api-access-4sf68\") pod \"certified-operators-9glc4\" (UID: \"849de51a-5755-4905-8627-1cc76e9e4647\") " pod="openshift-marketplace/certified-operators-9glc4" Feb 03 07:16:46 crc kubenswrapper[4708]: I0203 07:16:46.298353 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/849de51a-5755-4905-8627-1cc76e9e4647-catalog-content\") pod \"certified-operators-9glc4\" (UID: \"849de51a-5755-4905-8627-1cc76e9e4647\") " pod="openshift-marketplace/certified-operators-9glc4" Feb 03 07:16:46 crc kubenswrapper[4708]: I0203 07:16:46.298983 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/849de51a-5755-4905-8627-1cc76e9e4647-utilities\") pod \"certified-operators-9glc4\" (UID: \"849de51a-5755-4905-8627-1cc76e9e4647\") " pod="openshift-marketplace/certified-operators-9glc4" Feb 03 07:16:46 crc kubenswrapper[4708]: I0203 07:16:46.299117 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/849de51a-5755-4905-8627-1cc76e9e4647-catalog-content\") pod \"certified-operators-9glc4\" (UID: \"849de51a-5755-4905-8627-1cc76e9e4647\") " pod="openshift-marketplace/certified-operators-9glc4" Feb 03 07:16:46 crc kubenswrapper[4708]: I0203 07:16:46.303142 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-zh966"] Feb 03 07:16:46 crc kubenswrapper[4708]: I0203 07:16:46.304392 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zh966" Feb 03 07:16:46 crc kubenswrapper[4708]: I0203 07:16:46.306364 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Feb 03 07:16:46 crc kubenswrapper[4708]: I0203 07:16:46.313478 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zh966"] Feb 03 07:16:46 crc kubenswrapper[4708]: I0203 07:16:46.355117 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4sf68\" (UniqueName: \"kubernetes.io/projected/849de51a-5755-4905-8627-1cc76e9e4647-kube-api-access-4sf68\") pod \"certified-operators-9glc4\" (UID: \"849de51a-5755-4905-8627-1cc76e9e4647\") " pod="openshift-marketplace/certified-operators-9glc4" Feb 03 07:16:46 crc kubenswrapper[4708]: I0203 07:16:46.416827 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9glc4" Feb 03 07:16:46 crc kubenswrapper[4708]: I0203 07:16:46.503611 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a265481-0560-4311-8886-1e3a833e487d-catalog-content\") pod \"redhat-marketplace-zh966\" (UID: \"9a265481-0560-4311-8886-1e3a833e487d\") " pod="openshift-marketplace/redhat-marketplace-zh966" Feb 03 07:16:46 crc kubenswrapper[4708]: I0203 07:16:46.503994 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a265481-0560-4311-8886-1e3a833e487d-utilities\") pod \"redhat-marketplace-zh966\" (UID: \"9a265481-0560-4311-8886-1e3a833e487d\") " pod="openshift-marketplace/redhat-marketplace-zh966" Feb 03 07:16:46 crc kubenswrapper[4708]: I0203 07:16:46.504024 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g95h7\" (UniqueName: \"kubernetes.io/projected/9a265481-0560-4311-8886-1e3a833e487d-kube-api-access-g95h7\") pod \"redhat-marketplace-zh966\" (UID: \"9a265481-0560-4311-8886-1e3a833e487d\") " pod="openshift-marketplace/redhat-marketplace-zh966" Feb 03 07:16:46 crc kubenswrapper[4708]: I0203 07:16:46.604936 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a265481-0560-4311-8886-1e3a833e487d-catalog-content\") pod \"redhat-marketplace-zh966\" (UID: \"9a265481-0560-4311-8886-1e3a833e487d\") " pod="openshift-marketplace/redhat-marketplace-zh966" Feb 03 07:16:46 crc kubenswrapper[4708]: I0203 07:16:46.605010 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a265481-0560-4311-8886-1e3a833e487d-utilities\") pod \"redhat-marketplace-zh966\" (UID: \"9a265481-0560-4311-8886-1e3a833e487d\") " pod="openshift-marketplace/redhat-marketplace-zh966" Feb 03 07:16:46 crc kubenswrapper[4708]: I0203 07:16:46.605041 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g95h7\" (UniqueName: \"kubernetes.io/projected/9a265481-0560-4311-8886-1e3a833e487d-kube-api-access-g95h7\") pod \"redhat-marketplace-zh966\" (UID: \"9a265481-0560-4311-8886-1e3a833e487d\") " pod="openshift-marketplace/redhat-marketplace-zh966" Feb 03 07:16:46 crc kubenswrapper[4708]: I0203 07:16:46.605490 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a265481-0560-4311-8886-1e3a833e487d-catalog-content\") pod \"redhat-marketplace-zh966\" (UID: \"9a265481-0560-4311-8886-1e3a833e487d\") " pod="openshift-marketplace/redhat-marketplace-zh966" Feb 03 07:16:46 crc kubenswrapper[4708]: I0203 07:16:46.605490 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a265481-0560-4311-8886-1e3a833e487d-utilities\") pod \"redhat-marketplace-zh966\" (UID: \"9a265481-0560-4311-8886-1e3a833e487d\") " pod="openshift-marketplace/redhat-marketplace-zh966" Feb 03 07:16:46 crc kubenswrapper[4708]: I0203 07:16:46.632032 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g95h7\" (UniqueName: \"kubernetes.io/projected/9a265481-0560-4311-8886-1e3a833e487d-kube-api-access-g95h7\") pod 
\"redhat-marketplace-zh966\" (UID: \"9a265481-0560-4311-8886-1e3a833e487d\") " pod="openshift-marketplace/redhat-marketplace-zh966" Feb 03 07:16:46 crc kubenswrapper[4708]: I0203 07:16:46.635658 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9glc4"] Feb 03 07:16:47 crc kubenswrapper[4708]: W0203 07:16:46.652332 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod849de51a_5755_4905_8627_1cc76e9e4647.slice/crio-22a90f9d7088f073f3e3d25809520c2768c0d5667ff38072c5fa2884aa76f1df WatchSource:0}: Error finding container 22a90f9d7088f073f3e3d25809520c2768c0d5667ff38072c5fa2884aa76f1df: Status 404 returned error can't find the container with id 22a90f9d7088f073f3e3d25809520c2768c0d5667ff38072c5fa2884aa76f1df Feb 03 07:16:47 crc kubenswrapper[4708]: I0203 07:16:46.680841 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zh966" Feb 03 07:16:47 crc kubenswrapper[4708]: I0203 07:16:46.955277 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-kvmsf" Feb 03 07:16:47 crc kubenswrapper[4708]: I0203 07:16:46.959630 4708 generic.go:334] "Generic (PLEG): container finished" podID="849de51a-5755-4905-8627-1cc76e9e4647" containerID="5a7e9feb4897360b95dcb250edfa66d8849d4ef23e9570071c2912dc70c73e8c" exitCode=0 Feb 03 07:16:47 crc kubenswrapper[4708]: I0203 07:16:46.959685 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9glc4" event={"ID":"849de51a-5755-4905-8627-1cc76e9e4647","Type":"ContainerDied","Data":"5a7e9feb4897360b95dcb250edfa66d8849d4ef23e9570071c2912dc70c73e8c"} Feb 03 07:16:47 crc kubenswrapper[4708]: I0203 07:16:46.959708 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9glc4" event={"ID":"849de51a-5755-4905-8627-1cc76e9e4647","Type":"ContainerStarted","Data":"22a90f9d7088f073f3e3d25809520c2768c0d5667ff38072c5fa2884aa76f1df"} Feb 03 07:16:47 crc kubenswrapper[4708]: I0203 07:16:46.963341 4708 generic.go:334] "Generic (PLEG): container finished" podID="f881ea5f-3c53-4524-8999-6ecbfaf5dfef" containerID="33f0aebf05dfde2b8e3b64d69d4c95935124e6dfac257d1447c4b27d0d99d1b4" exitCode=0 Feb 03 07:16:47 crc kubenswrapper[4708]: I0203 07:16:46.963401 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zkk2w" event={"ID":"f881ea5f-3c53-4524-8999-6ecbfaf5dfef","Type":"ContainerDied","Data":"33f0aebf05dfde2b8e3b64d69d4c95935124e6dfac257d1447c4b27d0d99d1b4"} Feb 03 07:16:47 crc kubenswrapper[4708]: I0203 07:16:46.967939 4708 generic.go:334] "Generic (PLEG): container finished" podID="26447fe3-af8b-43e8-8aa8-e2e29f5639c1" containerID="b72b587476baafd41abdcaeb84409d58683d6b42e3a74870bb77c22a82bb98c4" exitCode=0 Feb 03 07:16:47 crc kubenswrapper[4708]: I0203 07:16:46.967981 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rpf9t" event={"ID":"26447fe3-af8b-43e8-8aa8-e2e29f5639c1","Type":"ContainerDied","Data":"b72b587476baafd41abdcaeb84409d58683d6b42e3a74870bb77c22a82bb98c4"} Feb 03 07:16:47 crc kubenswrapper[4708]: I0203 07:16:47.026350 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-nqhrk"] Feb 03 07:16:47 crc kubenswrapper[4708]: I0203 07:16:47.808317 4708 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zh966"] Feb 03 07:16:47 crc kubenswrapper[4708]: W0203 07:16:47.814114 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9a265481_0560_4311_8886_1e3a833e487d.slice/crio-c02ef29190de51844645d600c65ad552f4823a531f15fba31b9e0268b6f528b8 WatchSource:0}: Error finding container c02ef29190de51844645d600c65ad552f4823a531f15fba31b9e0268b6f528b8: Status 404 returned error can't find the container with id c02ef29190de51844645d600c65ad552f4823a531f15fba31b9e0268b6f528b8 Feb 03 07:16:47 crc kubenswrapper[4708]: I0203 07:16:47.975927 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9glc4" event={"ID":"849de51a-5755-4905-8627-1cc76e9e4647","Type":"ContainerStarted","Data":"f768571fe546d3cc9518d329129b4a0aa94c77d4c2724ba6606b9502711d40fc"} Feb 03 07:16:47 crc kubenswrapper[4708]: I0203 07:16:47.977729 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zkk2w" event={"ID":"f881ea5f-3c53-4524-8999-6ecbfaf5dfef","Type":"ContainerStarted","Data":"b89cca2c66accc1a28657365bc349e31b6144a8e7ab5fdca88c3abee27cd57a0"} Feb 03 07:16:47 crc kubenswrapper[4708]: I0203 07:16:47.986268 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rpf9t" event={"ID":"26447fe3-af8b-43e8-8aa8-e2e29f5639c1","Type":"ContainerStarted","Data":"8e7ff604e1f74d0a2f07fe0292ce180cdd0549d25b49f9ee22e9acb7b53a0e8c"} Feb 03 07:16:47 crc kubenswrapper[4708]: I0203 07:16:47.998833 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zh966" event={"ID":"9a265481-0560-4311-8886-1e3a833e487d","Type":"ContainerStarted","Data":"21de4426ba9d7d9aa05bddad80f23790ed57def60e2918815782da3d18d2a462"} Feb 03 07:16:47 crc kubenswrapper[4708]: I0203 07:16:47.998871 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zh966" event={"ID":"9a265481-0560-4311-8886-1e3a833e487d","Type":"ContainerStarted","Data":"c02ef29190de51844645d600c65ad552f4823a531f15fba31b9e0268b6f528b8"} Feb 03 07:16:48 crc kubenswrapper[4708]: I0203 07:16:48.020551 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rpf9t" podStartSLOduration=2.402876966 podStartE2EDuration="5.020514168s" podCreationTimestamp="2026-02-03 07:16:43 +0000 UTC" firstStartedPulling="2026-02-03 07:16:44.918631587 +0000 UTC m=+383.900578394" lastFinishedPulling="2026-02-03 07:16:47.536268789 +0000 UTC m=+386.518215596" observedRunningTime="2026-02-03 07:16:48.014549899 +0000 UTC m=+386.996496706" watchObservedRunningTime="2026-02-03 07:16:48.020514168 +0000 UTC m=+387.002460975" Feb 03 07:16:48 crc kubenswrapper[4708]: I0203 07:16:48.039013 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zkk2w" podStartSLOduration=2.536328741 podStartE2EDuration="5.038991108s" podCreationTimestamp="2026-02-03 07:16:43 +0000 UTC" firstStartedPulling="2026-02-03 07:16:44.915943264 +0000 UTC m=+383.897890111" lastFinishedPulling="2026-02-03 07:16:47.418605651 +0000 UTC m=+386.400552478" observedRunningTime="2026-02-03 07:16:48.035111787 +0000 UTC m=+387.017058594" watchObservedRunningTime="2026-02-03 07:16:48.038991108 +0000 UTC m=+387.020937915" Feb 03 07:16:49 crc kubenswrapper[4708]: I0203 
07:16:49.005954 4708 generic.go:334] "Generic (PLEG): container finished" podID="849de51a-5755-4905-8627-1cc76e9e4647" containerID="f768571fe546d3cc9518d329129b4a0aa94c77d4c2724ba6606b9502711d40fc" exitCode=0 Feb 03 07:16:49 crc kubenswrapper[4708]: I0203 07:16:49.006215 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9glc4" event={"ID":"849de51a-5755-4905-8627-1cc76e9e4647","Type":"ContainerDied","Data":"f768571fe546d3cc9518d329129b4a0aa94c77d4c2724ba6606b9502711d40fc"} Feb 03 07:16:49 crc kubenswrapper[4708]: I0203 07:16:49.013101 4708 generic.go:334] "Generic (PLEG): container finished" podID="9a265481-0560-4311-8886-1e3a833e487d" containerID="21de4426ba9d7d9aa05bddad80f23790ed57def60e2918815782da3d18d2a462" exitCode=0 Feb 03 07:16:49 crc kubenswrapper[4708]: I0203 07:16:49.014015 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zh966" event={"ID":"9a265481-0560-4311-8886-1e3a833e487d","Type":"ContainerDied","Data":"21de4426ba9d7d9aa05bddad80f23790ed57def60e2918815782da3d18d2a462"} Feb 03 07:16:49 crc kubenswrapper[4708]: I0203 07:16:49.014037 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zh966" event={"ID":"9a265481-0560-4311-8886-1e3a833e487d","Type":"ContainerStarted","Data":"2e0ec3c864f2dfd4f13c52db36ab04d9c8cc4e38417900ecf6fca11ebd0d13a1"} Feb 03 07:16:50 crc kubenswrapper[4708]: I0203 07:16:50.021233 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9glc4" event={"ID":"849de51a-5755-4905-8627-1cc76e9e4647","Type":"ContainerStarted","Data":"d9f45e6067cbcbbd783141097e7e8cb022646dffa4d7f533dab995ea729bc313"} Feb 03 07:16:50 crc kubenswrapper[4708]: I0203 07:16:50.023867 4708 generic.go:334] "Generic (PLEG): container finished" podID="9a265481-0560-4311-8886-1e3a833e487d" containerID="2e0ec3c864f2dfd4f13c52db36ab04d9c8cc4e38417900ecf6fca11ebd0d13a1" exitCode=0 Feb 03 07:16:50 crc kubenswrapper[4708]: I0203 07:16:50.023971 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zh966" event={"ID":"9a265481-0560-4311-8886-1e3a833e487d","Type":"ContainerDied","Data":"2e0ec3c864f2dfd4f13c52db36ab04d9c8cc4e38417900ecf6fca11ebd0d13a1"} Feb 03 07:16:50 crc kubenswrapper[4708]: I0203 07:16:50.024090 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zh966" event={"ID":"9a265481-0560-4311-8886-1e3a833e487d","Type":"ContainerStarted","Data":"95bc4b068ec65490c8604c94fd64b688cc1929b3ecafee0dd2f7dac6e574b4a1"} Feb 03 07:16:50 crc kubenswrapper[4708]: I0203 07:16:50.078898 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9glc4" podStartSLOduration=1.593546042 podStartE2EDuration="4.078877666s" podCreationTimestamp="2026-02-03 07:16:46 +0000 UTC" firstStartedPulling="2026-02-03 07:16:46.96089419 +0000 UTC m=+385.942840997" lastFinishedPulling="2026-02-03 07:16:49.446225814 +0000 UTC m=+388.428172621" observedRunningTime="2026-02-03 07:16:50.046075263 +0000 UTC m=+389.028022070" watchObservedRunningTime="2026-02-03 07:16:50.078877666 +0000 UTC m=+389.060824473" Feb 03 07:16:53 crc kubenswrapper[4708]: I0203 07:16:53.821605 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zkk2w" Feb 03 07:16:53 crc kubenswrapper[4708]: I0203 07:16:53.822197 4708 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zkk2w" Feb 03 07:16:53 crc kubenswrapper[4708]: I0203 07:16:53.832922 4708 patch_prober.go:28] interesting pod/machine-config-daemon-r94bn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:16:53 crc kubenswrapper[4708]: I0203 07:16:53.833021 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:16:53 crc kubenswrapper[4708]: I0203 07:16:53.872059 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zkk2w" Feb 03 07:16:53 crc kubenswrapper[4708]: I0203 07:16:53.895558 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-zh966" podStartSLOduration=6.371648492 podStartE2EDuration="7.895531454s" podCreationTimestamp="2026-02-03 07:16:46 +0000 UTC" firstStartedPulling="2026-02-03 07:16:48.000029091 +0000 UTC m=+386.981975918" lastFinishedPulling="2026-02-03 07:16:49.523912073 +0000 UTC m=+388.505858880" observedRunningTime="2026-02-03 07:16:50.079393178 +0000 UTC m=+389.061339995" watchObservedRunningTime="2026-02-03 07:16:53.895531454 +0000 UTC m=+392.877478271" Feb 03 07:16:54 crc kubenswrapper[4708]: I0203 07:16:54.020731 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rpf9t" Feb 03 07:16:54 crc kubenswrapper[4708]: I0203 07:16:54.020772 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rpf9t" Feb 03 07:16:54 crc kubenswrapper[4708]: I0203 07:16:54.103258 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rpf9t" Feb 03 07:16:54 crc kubenswrapper[4708]: I0203 07:16:54.107151 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zkk2w" Feb 03 07:16:54 crc kubenswrapper[4708]: I0203 07:16:54.148548 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rpf9t" Feb 03 07:16:56 crc kubenswrapper[4708]: I0203 07:16:56.417761 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9glc4" Feb 03 07:16:56 crc kubenswrapper[4708]: I0203 07:16:56.418239 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9glc4" Feb 03 07:16:56 crc kubenswrapper[4708]: I0203 07:16:56.465025 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9glc4" Feb 03 07:16:56 crc kubenswrapper[4708]: I0203 07:16:56.681547 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-zh966" Feb 03 07:16:56 crc kubenswrapper[4708]: I0203 07:16:56.681646 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-marketplace-zh966" Feb 03 07:16:56 crc kubenswrapper[4708]: I0203 07:16:56.729206 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-zh966" Feb 03 07:16:57 crc kubenswrapper[4708]: I0203 07:16:57.128692 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-zh966" Feb 03 07:16:57 crc kubenswrapper[4708]: I0203 07:16:57.131434 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9glc4" Feb 03 07:17:12 crc kubenswrapper[4708]: I0203 07:17:12.069678 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" podUID="c35d1e2c-9135-4bff-a0af-cd20addc6134" containerName="registry" containerID="cri-o://9cfd7f3c07fd39c9a20807db1f88553420a914207625a35139033785648be6d4" gracePeriod=30 Feb 03 07:17:12 crc kubenswrapper[4708]: I0203 07:17:12.523552 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:17:12 crc kubenswrapper[4708]: I0203 07:17:12.712916 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"c35d1e2c-9135-4bff-a0af-cd20addc6134\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " Feb 03 07:17:12 crc kubenswrapper[4708]: I0203 07:17:12.712997 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c35d1e2c-9135-4bff-a0af-cd20addc6134-installation-pull-secrets\") pod \"c35d1e2c-9135-4bff-a0af-cd20addc6134\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " Feb 03 07:17:12 crc kubenswrapper[4708]: I0203 07:17:12.713046 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c35d1e2c-9135-4bff-a0af-cd20addc6134-bound-sa-token\") pod \"c35d1e2c-9135-4bff-a0af-cd20addc6134\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " Feb 03 07:17:12 crc kubenswrapper[4708]: I0203 07:17:12.713152 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c35d1e2c-9135-4bff-a0af-cd20addc6134-registry-certificates\") pod \"c35d1e2c-9135-4bff-a0af-cd20addc6134\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " Feb 03 07:17:12 crc kubenswrapper[4708]: I0203 07:17:12.713216 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q6ks9\" (UniqueName: \"kubernetes.io/projected/c35d1e2c-9135-4bff-a0af-cd20addc6134-kube-api-access-q6ks9\") pod \"c35d1e2c-9135-4bff-a0af-cd20addc6134\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " Feb 03 07:17:12 crc kubenswrapper[4708]: I0203 07:17:12.713325 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c35d1e2c-9135-4bff-a0af-cd20addc6134-ca-trust-extracted\") pod \"c35d1e2c-9135-4bff-a0af-cd20addc6134\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " Feb 03 07:17:12 crc kubenswrapper[4708]: I0203 07:17:12.713411 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"registry-tls\" (UniqueName: \"kubernetes.io/projected/c35d1e2c-9135-4bff-a0af-cd20addc6134-registry-tls\") pod \"c35d1e2c-9135-4bff-a0af-cd20addc6134\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " Feb 03 07:17:12 crc kubenswrapper[4708]: I0203 07:17:12.713473 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c35d1e2c-9135-4bff-a0af-cd20addc6134-trusted-ca\") pod \"c35d1e2c-9135-4bff-a0af-cd20addc6134\" (UID: \"c35d1e2c-9135-4bff-a0af-cd20addc6134\") " Feb 03 07:17:12 crc kubenswrapper[4708]: I0203 07:17:12.714779 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c35d1e2c-9135-4bff-a0af-cd20addc6134-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "c35d1e2c-9135-4bff-a0af-cd20addc6134" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:17:12 crc kubenswrapper[4708]: I0203 07:17:12.714861 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c35d1e2c-9135-4bff-a0af-cd20addc6134-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "c35d1e2c-9135-4bff-a0af-cd20addc6134" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:17:12 crc kubenswrapper[4708]: I0203 07:17:12.715544 4708 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c35d1e2c-9135-4bff-a0af-cd20addc6134-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 03 07:17:12 crc kubenswrapper[4708]: I0203 07:17:12.715585 4708 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c35d1e2c-9135-4bff-a0af-cd20addc6134-registry-certificates\") on node \"crc\" DevicePath \"\"" Feb 03 07:17:12 crc kubenswrapper[4708]: I0203 07:17:12.719747 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c35d1e2c-9135-4bff-a0af-cd20addc6134-kube-api-access-q6ks9" (OuterVolumeSpecName: "kube-api-access-q6ks9") pod "c35d1e2c-9135-4bff-a0af-cd20addc6134" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134"). InnerVolumeSpecName "kube-api-access-q6ks9". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:17:12 crc kubenswrapper[4708]: I0203 07:17:12.720497 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c35d1e2c-9135-4bff-a0af-cd20addc6134-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "c35d1e2c-9135-4bff-a0af-cd20addc6134" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:17:12 crc kubenswrapper[4708]: I0203 07:17:12.721361 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c35d1e2c-9135-4bff-a0af-cd20addc6134-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "c35d1e2c-9135-4bff-a0af-cd20addc6134" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:17:12 crc kubenswrapper[4708]: I0203 07:17:12.721848 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c35d1e2c-9135-4bff-a0af-cd20addc6134-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "c35d1e2c-9135-4bff-a0af-cd20addc6134" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:17:12 crc kubenswrapper[4708]: I0203 07:17:12.727384 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "c35d1e2c-9135-4bff-a0af-cd20addc6134" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 03 07:17:12 crc kubenswrapper[4708]: I0203 07:17:12.754135 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c35d1e2c-9135-4bff-a0af-cd20addc6134-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "c35d1e2c-9135-4bff-a0af-cd20addc6134" (UID: "c35d1e2c-9135-4bff-a0af-cd20addc6134"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:17:12 crc kubenswrapper[4708]: I0203 07:17:12.817341 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q6ks9\" (UniqueName: \"kubernetes.io/projected/c35d1e2c-9135-4bff-a0af-cd20addc6134-kube-api-access-q6ks9\") on node \"crc\" DevicePath \"\"" Feb 03 07:17:12 crc kubenswrapper[4708]: I0203 07:17:12.817396 4708 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c35d1e2c-9135-4bff-a0af-cd20addc6134-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Feb 03 07:17:12 crc kubenswrapper[4708]: I0203 07:17:12.817416 4708 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c35d1e2c-9135-4bff-a0af-cd20addc6134-registry-tls\") on node \"crc\" DevicePath \"\"" Feb 03 07:17:12 crc kubenswrapper[4708]: I0203 07:17:12.817431 4708 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c35d1e2c-9135-4bff-a0af-cd20addc6134-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Feb 03 07:17:12 crc kubenswrapper[4708]: I0203 07:17:12.817447 4708 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c35d1e2c-9135-4bff-a0af-cd20addc6134-bound-sa-token\") on node \"crc\" DevicePath \"\"" Feb 03 07:17:13 crc kubenswrapper[4708]: I0203 07:17:13.168849 4708 generic.go:334] "Generic (PLEG): container finished" podID="c35d1e2c-9135-4bff-a0af-cd20addc6134" containerID="9cfd7f3c07fd39c9a20807db1f88553420a914207625a35139033785648be6d4" exitCode=0 Feb 03 07:17:13 crc kubenswrapper[4708]: I0203 07:17:13.168923 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" Feb 03 07:17:13 crc kubenswrapper[4708]: I0203 07:17:13.168923 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" event={"ID":"c35d1e2c-9135-4bff-a0af-cd20addc6134","Type":"ContainerDied","Data":"9cfd7f3c07fd39c9a20807db1f88553420a914207625a35139033785648be6d4"} Feb 03 07:17:13 crc kubenswrapper[4708]: I0203 07:17:13.169005 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-nqhrk" event={"ID":"c35d1e2c-9135-4bff-a0af-cd20addc6134","Type":"ContainerDied","Data":"7fde67b9a9ce4ffe7e2b80bae2791ba5dc7f9f04a407768d34dc26aabfaf969c"} Feb 03 07:17:13 crc kubenswrapper[4708]: I0203 07:17:13.169028 4708 scope.go:117] "RemoveContainer" containerID="9cfd7f3c07fd39c9a20807db1f88553420a914207625a35139033785648be6d4" Feb 03 07:17:13 crc kubenswrapper[4708]: I0203 07:17:13.186573 4708 scope.go:117] "RemoveContainer" containerID="9cfd7f3c07fd39c9a20807db1f88553420a914207625a35139033785648be6d4" Feb 03 07:17:13 crc kubenswrapper[4708]: E0203 07:17:13.187011 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9cfd7f3c07fd39c9a20807db1f88553420a914207625a35139033785648be6d4\": container with ID starting with 9cfd7f3c07fd39c9a20807db1f88553420a914207625a35139033785648be6d4 not found: ID does not exist" containerID="9cfd7f3c07fd39c9a20807db1f88553420a914207625a35139033785648be6d4" Feb 03 07:17:13 crc kubenswrapper[4708]: I0203 07:17:13.187060 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9cfd7f3c07fd39c9a20807db1f88553420a914207625a35139033785648be6d4"} err="failed to get container status \"9cfd7f3c07fd39c9a20807db1f88553420a914207625a35139033785648be6d4\": rpc error: code = NotFound desc = could not find container \"9cfd7f3c07fd39c9a20807db1f88553420a914207625a35139033785648be6d4\": container with ID starting with 9cfd7f3c07fd39c9a20807db1f88553420a914207625a35139033785648be6d4 not found: ID does not exist" Feb 03 07:17:13 crc kubenswrapper[4708]: I0203 07:17:13.201691 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-nqhrk"] Feb 03 07:17:13 crc kubenswrapper[4708]: I0203 07:17:13.219285 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-nqhrk"] Feb 03 07:17:14 crc kubenswrapper[4708]: I0203 07:17:14.100480 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c35d1e2c-9135-4bff-a0af-cd20addc6134" path="/var/lib/kubelet/pods/c35d1e2c-9135-4bff-a0af-cd20addc6134/volumes" Feb 03 07:17:23 crc kubenswrapper[4708]: I0203 07:17:23.833592 4708 patch_prober.go:28] interesting pod/machine-config-daemon-r94bn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:17:23 crc kubenswrapper[4708]: I0203 07:17:23.834420 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:17:23 crc 
Feb 03 07:17:23 crc kubenswrapper[4708]: I0203 07:17:23.833592 4708 patch_prober.go:28] interesting pod/machine-config-daemon-r94bn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 03 07:17:23 crc kubenswrapper[4708]: I0203 07:17:23.834420 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 03 07:17:23 crc kubenswrapper[4708]: I0203 07:17:23.834487 4708 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-r94bn"
Feb 03 07:17:23 crc kubenswrapper[4708]: I0203 07:17:23.835301 4708 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b3d08046058bec9282f069dc43715820de60eb6b7be4c972144f0a3216e6f3f3"} pod="openshift-machine-config-operator/machine-config-daemon-r94bn" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Feb 03 07:17:23 crc kubenswrapper[4708]: I0203 07:17:23.835389 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" containerID="cri-o://b3d08046058bec9282f069dc43715820de60eb6b7be4c972144f0a3216e6f3f3" gracePeriod=600
Feb 03 07:17:24 crc kubenswrapper[4708]: I0203 07:17:24.241542 4708 generic.go:334] "Generic (PLEG): container finished" podID="67498414-5132-496e-9638-189f5941ace0" containerID="b3d08046058bec9282f069dc43715820de60eb6b7be4c972144f0a3216e6f3f3" exitCode=0
Feb 03 07:17:24 crc kubenswrapper[4708]: I0203 07:17:24.241638 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" event={"ID":"67498414-5132-496e-9638-189f5941ace0","Type":"ContainerDied","Data":"b3d08046058bec9282f069dc43715820de60eb6b7be4c972144f0a3216e6f3f3"}
Feb 03 07:17:24 crc kubenswrapper[4708]: I0203 07:17:24.241979 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" event={"ID":"67498414-5132-496e-9638-189f5941ace0","Type":"ContainerStarted","Data":"b515af69685be553960030cd44dc51009b8fa698b76226a5a9defa9717522d12"}
Feb 03 07:17:24 crc kubenswrapper[4708]: I0203 07:17:24.242001 4708 scope.go:117] "RemoveContainer" containerID="24b0b6ec78a9e92291f26205308b2ae419052c806aa88f173d5e1fff4ace01b4"
Feb 03 07:19:53 crc kubenswrapper[4708]: I0203 07:19:53.833522 4708 patch_prober.go:28] interesting pod/machine-config-daemon-r94bn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 03 07:19:53 crc kubenswrapper[4708]: I0203 07:19:53.834248 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 03 07:20:23 crc kubenswrapper[4708]: I0203 07:20:23.833615 4708 patch_prober.go:28] interesting pod/machine-config-daemon-r94bn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 03 07:20:23 crc kubenswrapper[4708]: I0203 07:20:23.834981 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 03 07:20:53 crc kubenswrapper[4708]: I0203 07:20:53.834209 4708 patch_prober.go:28] interesting pod/machine-config-daemon-r94bn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 03 07:20:53 crc kubenswrapper[4708]: I0203 07:20:53.834826 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 03 07:20:53 crc kubenswrapper[4708]: I0203 07:20:53.834873 4708 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-r94bn"
Feb 03 07:20:53 crc kubenswrapper[4708]: I0203 07:20:53.835535 4708 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b515af69685be553960030cd44dc51009b8fa698b76226a5a9defa9717522d12"} pod="openshift-machine-config-operator/machine-config-daemon-r94bn" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Feb 03 07:20:53 crc kubenswrapper[4708]: I0203 07:20:53.835586 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" containerID="cri-o://b515af69685be553960030cd44dc51009b8fa698b76226a5a9defa9717522d12" gracePeriod=600
Feb 03 07:20:54 crc kubenswrapper[4708]: I0203 07:20:54.564332 4708 generic.go:334] "Generic (PLEG): container finished" podID="67498414-5132-496e-9638-189f5941ace0" containerID="b515af69685be553960030cd44dc51009b8fa698b76226a5a9defa9717522d12" exitCode=0
Feb 03 07:20:54 crc kubenswrapper[4708]: I0203 07:20:54.564429 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" event={"ID":"67498414-5132-496e-9638-189f5941ace0","Type":"ContainerDied","Data":"b515af69685be553960030cd44dc51009b8fa698b76226a5a9defa9717522d12"}
Feb 03 07:20:54 crc kubenswrapper[4708]: I0203 07:20:54.564913 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" event={"ID":"67498414-5132-496e-9638-189f5941ace0","Type":"ContainerStarted","Data":"85eaca543a1965c16dfd9764a0051f13446290670638b0dde7e65e129f02d68c"}
Feb 03 07:20:54 crc kubenswrapper[4708]: I0203 07:20:54.564947 4708 scope.go:117] "RemoveContainer" containerID="b3d08046058bec9282f069dc43715820de60eb6b7be4c972144f0a3216e6f3f3"
Feb 03 07:20:55 crc kubenswrapper[4708]: I0203 07:20:55.962910 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-xfwvc"]
Feb 03 07:20:55 crc kubenswrapper[4708]: E0203 07:20:55.963457 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c35d1e2c-9135-4bff-a0af-cd20addc6134" containerName="registry"
Feb 03 07:20:55 crc kubenswrapper[4708]: I0203 07:20:55.963473 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="c35d1e2c-9135-4bff-a0af-cd20addc6134" containerName="registry"
Feb 03 07:20:55 crc kubenswrapper[4708]: I0203 07:20:55.963610 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="c35d1e2c-9135-4bff-a0af-cd20addc6134" containerName="registry"
Feb 03 07:20:55 crc kubenswrapper[4708]: I0203 07:20:55.964297 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-cf98fcc89-xfwvc"
Feb 03 07:20:55 crc kubenswrapper[4708]: I0203 07:20:55.965629 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt"
Feb 03 07:20:55 crc kubenswrapper[4708]: I0203 07:20:55.966867 4708 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-x9rjm"
Feb 03 07:20:55 crc kubenswrapper[4708]: I0203 07:20:55.970067 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt"
Feb 03 07:20:55 crc kubenswrapper[4708]: I0203 07:20:55.979048 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-858654f9db-jpxrc"]
Feb 03 07:20:55 crc kubenswrapper[4708]: I0203 07:20:55.979875 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-858654f9db-jpxrc"
Feb 03 07:20:55 crc kubenswrapper[4708]: I0203 07:20:55.982194 4708 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-d89kb"
Feb 03 07:20:55 crc kubenswrapper[4708]: I0203 07:20:55.985967 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-xfwvc"]
Feb 03 07:20:55 crc kubenswrapper[4708]: I0203 07:20:55.990911 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-j4njw"]
Feb 03 07:20:55 crc kubenswrapper[4708]: I0203 07:20:55.991549 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-687f57d79b-j4njw"
Feb 03 07:20:55 crc kubenswrapper[4708]: I0203 07:20:55.993985 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-j4njw"]
Feb 03 07:20:55 crc kubenswrapper[4708]: I0203 07:20:55.994294 4708 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-dqr7q"
Feb 03 07:20:56 crc kubenswrapper[4708]: I0203 07:20:56.003554 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-858654f9db-jpxrc"]
Feb 03 07:20:56 crc kubenswrapper[4708]: I0203 07:20:56.042332 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rkrhb\" (UniqueName: \"kubernetes.io/projected/4df04764-d566-42a4-b7f4-af82a04b3fc3-kube-api-access-rkrhb\") pod \"cert-manager-cainjector-cf98fcc89-xfwvc\" (UID: \"4df04764-d566-42a4-b7f4-af82a04b3fc3\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-xfwvc"
Feb 03 07:20:56 crc kubenswrapper[4708]: I0203 07:20:56.042442 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9s82v\" (UniqueName: \"kubernetes.io/projected/4b1cdb17-b07e-4d3a-86fd-418361057f9d-kube-api-access-9s82v\") pod \"cert-manager-webhook-687f57d79b-j4njw\" (UID: \"4b1cdb17-b07e-4d3a-86fd-418361057f9d\") " pod="cert-manager/cert-manager-webhook-687f57d79b-j4njw"
Feb 03 07:20:56 crc kubenswrapper[4708]: I0203 07:20:56.042468 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gcvpr\" (UniqueName: \"kubernetes.io/projected/59a903ed-9dad-488c-b531-cbe96052d31b-kube-api-access-gcvpr\") pod \"cert-manager-858654f9db-jpxrc\" (UID: \"59a903ed-9dad-488c-b531-cbe96052d31b\") " pod="cert-manager/cert-manager-858654f9db-jpxrc"
Feb 03 07:20:56 crc kubenswrapper[4708]: I0203 07:20:56.143569 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9s82v\" (UniqueName: \"kubernetes.io/projected/4b1cdb17-b07e-4d3a-86fd-418361057f9d-kube-api-access-9s82v\") pod \"cert-manager-webhook-687f57d79b-j4njw\" (UID: \"4b1cdb17-b07e-4d3a-86fd-418361057f9d\") " pod="cert-manager/cert-manager-webhook-687f57d79b-j4njw"
Feb 03 07:20:56 crc kubenswrapper[4708]: I0203 07:20:56.143617 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gcvpr\" (UniqueName: \"kubernetes.io/projected/59a903ed-9dad-488c-b531-cbe96052d31b-kube-api-access-gcvpr\") pod \"cert-manager-858654f9db-jpxrc\" (UID: \"59a903ed-9dad-488c-b531-cbe96052d31b\") " pod="cert-manager/cert-manager-858654f9db-jpxrc"
Feb 03 07:20:56 crc kubenswrapper[4708]: I0203 07:20:56.144030 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rkrhb\" (UniqueName: \"kubernetes.io/projected/4df04764-d566-42a4-b7f4-af82a04b3fc3-kube-api-access-rkrhb\") pod \"cert-manager-cainjector-cf98fcc89-xfwvc\" (UID: \"4df04764-d566-42a4-b7f4-af82a04b3fc3\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-xfwvc"
Feb 03 07:20:56 crc kubenswrapper[4708]: I0203 07:20:56.162762 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rkrhb\" (UniqueName: \"kubernetes.io/projected/4df04764-d566-42a4-b7f4-af82a04b3fc3-kube-api-access-rkrhb\") pod \"cert-manager-cainjector-cf98fcc89-xfwvc\" (UID: \"4df04764-d566-42a4-b7f4-af82a04b3fc3\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-xfwvc"
Feb 03 07:20:56 crc kubenswrapper[4708]: I0203 07:20:56.163119 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9s82v\" (UniqueName: \"kubernetes.io/projected/4b1cdb17-b07e-4d3a-86fd-418361057f9d-kube-api-access-9s82v\") pod \"cert-manager-webhook-687f57d79b-j4njw\" (UID: \"4b1cdb17-b07e-4d3a-86fd-418361057f9d\") " pod="cert-manager/cert-manager-webhook-687f57d79b-j4njw"
Feb 03 07:20:56 crc kubenswrapper[4708]: I0203 07:20:56.163426 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gcvpr\" (UniqueName: \"kubernetes.io/projected/59a903ed-9dad-488c-b531-cbe96052d31b-kube-api-access-gcvpr\") pod \"cert-manager-858654f9db-jpxrc\" (UID: \"59a903ed-9dad-488c-b531-cbe96052d31b\") " pod="cert-manager/cert-manager-858654f9db-jpxrc"
Feb 03 07:20:56 crc kubenswrapper[4708]: I0203 07:20:56.283569 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-cf98fcc89-xfwvc"
Feb 03 07:20:56 crc kubenswrapper[4708]: I0203 07:20:56.295705 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-858654f9db-jpxrc"
Feb 03 07:20:56 crc kubenswrapper[4708]: I0203 07:20:56.306029 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-687f57d79b-j4njw"
Feb 03 07:20:56 crc kubenswrapper[4708]: I0203 07:20:56.506069 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-xfwvc"]
Feb 03 07:20:56 crc kubenswrapper[4708]: W0203 07:20:56.511138 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4df04764_d566_42a4_b7f4_af82a04b3fc3.slice/crio-d63164d78c30bb05d64972df2b15c3b5cd097200fd210dc248b62f6cb60dd931 WatchSource:0}: Error finding container d63164d78c30bb05d64972df2b15c3b5cd097200fd210dc248b62f6cb60dd931: Status 404 returned error can't find the container with id d63164d78c30bb05d64972df2b15c3b5cd097200fd210dc248b62f6cb60dd931
Feb 03 07:20:56 crc kubenswrapper[4708]: I0203 07:20:56.514690 4708 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Feb 03 07:20:56 crc kubenswrapper[4708]: I0203 07:20:56.549018 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-858654f9db-jpxrc"]
Feb 03 07:20:56 crc kubenswrapper[4708]: W0203 07:20:56.557353 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod59a903ed_9dad_488c_b531_cbe96052d31b.slice/crio-101c7492154d7c286818337d28ae1dfc98cc0ec936eaa380cfeb866b74bb386d WatchSource:0}: Error finding container 101c7492154d7c286818337d28ae1dfc98cc0ec936eaa380cfeb866b74bb386d: Status 404 returned error can't find the container with id 101c7492154d7c286818337d28ae1dfc98cc0ec936eaa380cfeb866b74bb386d
Feb 03 07:20:56 crc kubenswrapper[4708]: I0203 07:20:56.580703 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-cf98fcc89-xfwvc" event={"ID":"4df04764-d566-42a4-b7f4-af82a04b3fc3","Type":"ContainerStarted","Data":"d63164d78c30bb05d64972df2b15c3b5cd097200fd210dc248b62f6cb60dd931"}
Feb 03 07:20:56 crc kubenswrapper[4708]: I0203 07:20:56.581471 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-858654f9db-jpxrc" event={"ID":"59a903ed-9dad-488c-b531-cbe96052d31b","Type":"ContainerStarted","Data":"101c7492154d7c286818337d28ae1dfc98cc0ec936eaa380cfeb866b74bb386d"}
Feb 03 07:20:56 crc kubenswrapper[4708]: I0203 07:20:56.582303 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-j4njw"]
Feb 03 07:20:56 crc kubenswrapper[4708]: W0203 07:20:56.587662 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4b1cdb17_b07e_4d3a_86fd_418361057f9d.slice/crio-ccadcf0449f2f42a425bfa026ca47f953d73ca1e7e4d17cb6f0d66fde963af11 WatchSource:0}: Error finding container ccadcf0449f2f42a425bfa026ca47f953d73ca1e7e4d17cb6f0d66fde963af11: Status 404 returned error can't find the container with id ccadcf0449f2f42a425bfa026ca47f953d73ca1e7e4d17cb6f0d66fde963af11
Feb 03 07:20:57 crc kubenswrapper[4708]: I0203 07:20:57.589431 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-687f57d79b-j4njw" event={"ID":"4b1cdb17-b07e-4d3a-86fd-418361057f9d","Type":"ContainerStarted","Data":"ccadcf0449f2f42a425bfa026ca47f953d73ca1e7e4d17cb6f0d66fde963af11"}
Feb 03 07:21:01 crc kubenswrapper[4708]: I0203 07:21:01.617343 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-687f57d79b-j4njw" event={"ID":"4b1cdb17-b07e-4d3a-86fd-418361057f9d","Type":"ContainerStarted","Data":"c6ec2fd153338ad20ad053794352a0ae73df99c210e9e3367b03e6acdaf77873"}
Feb 03 07:21:01 crc kubenswrapper[4708]: I0203 07:21:01.618260 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-687f57d79b-j4njw"
Feb 03 07:21:01 crc kubenswrapper[4708]: I0203 07:21:01.619441 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-cf98fcc89-xfwvc" event={"ID":"4df04764-d566-42a4-b7f4-af82a04b3fc3","Type":"ContainerStarted","Data":"bedb376625057db4c893e5931d90574265c4122105d23464acc2c5bd240e4733"}
Feb 03 07:21:01 crc kubenswrapper[4708]: I0203 07:21:01.621981 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-858654f9db-jpxrc" event={"ID":"59a903ed-9dad-488c-b531-cbe96052d31b","Type":"ContainerStarted","Data":"314e49ae4ea022e0995f8c96bf3774915002fe919da204e2c46cccbe40610dfe"}
Feb 03 07:21:01 crc kubenswrapper[4708]: I0203 07:21:01.642259 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-687f57d79b-j4njw" podStartSLOduration=2.168040523 podStartE2EDuration="6.642230967s" podCreationTimestamp="2026-02-03 07:20:55 +0000 UTC" firstStartedPulling="2026-02-03 07:20:56.58972538 +0000 UTC m=+635.571672187" lastFinishedPulling="2026-02-03 07:21:01.063915824 +0000 UTC m=+640.045862631" observedRunningTime="2026-02-03 07:21:01.637264428 +0000 UTC m=+640.619211275" watchObservedRunningTime="2026-02-03 07:21:01.642230967 +0000 UTC m=+640.624177814"
Feb 03 07:21:01 crc kubenswrapper[4708]: I0203 07:21:01.663218 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-858654f9db-jpxrc" podStartSLOduration=2.507968902 podStartE2EDuration="6.663194454s" podCreationTimestamp="2026-02-03 07:20:55 +0000 UTC" firstStartedPulling="2026-02-03 07:20:56.559298394 +0000 UTC m=+635.541245211" lastFinishedPulling="2026-02-03 07:21:00.714523916 +0000 UTC m=+639.696470763" observedRunningTime="2026-02-03 07:21:01.660364676 +0000 UTC m=+640.642311523" watchObservedRunningTime="2026-02-03 07:21:01.663194454 +0000 UTC m=+640.645141301"
Feb 03 07:21:01 crc kubenswrapper[4708]: I0203 07:21:01.694506 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-cf98fcc89-xfwvc" podStartSLOduration=2.494336672 podStartE2EDuration="6.6944808s" podCreationTimestamp="2026-02-03 07:20:55 +0000 UTC" firstStartedPulling="2026-02-03 07:20:56.514429549 +0000 UTC m=+635.496376356" lastFinishedPulling="2026-02-03 07:21:00.714573637 +0000 UTC m=+639.696520484" observedRunningTime="2026-02-03 07:21:01.690081174 +0000 UTC m=+640.672028001" watchObservedRunningTime="2026-02-03 07:21:01.6944808 +0000 UTC m=+640.676427657"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.279339 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-2sfqf"]
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.279988 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="ovn-controller" containerID="cri-o://5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff" gracePeriod=30
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.280032 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="nbdb" containerID="cri-o://ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59" gracePeriod=30
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.280096 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="northd" containerID="cri-o://24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597" gracePeriod=30
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.280129 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53" gracePeriod=30
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.280159 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="kube-rbac-proxy-node" containerID="cri-o://11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545" gracePeriod=30
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.280196 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="sbdb" containerID="cri-o://e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30" gracePeriod=30
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.280192 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="ovn-acl-logging" containerID="cri-o://3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1" gracePeriod=30
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.315034 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="ovnkube-controller" containerID="cri-o://fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99" gracePeriod=30
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.573606 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2sfqf_b0d14461-efec-4909-82de-2cce585892a4/ovnkube-controller/3.log"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.604403 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2sfqf_b0d14461-efec-4909-82de-2cce585892a4/ovn-acl-logging/0.log"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.604901 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2sfqf_b0d14461-efec-4909-82de-2cce585892a4/ovn-controller/0.log"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.605456 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.644041 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-f2fzr_7cedfe91-d1c3-4c56-9aac-797ecade9468/kube-multus/2.log"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.644621 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-f2fzr_7cedfe91-d1c3-4c56-9aac-797ecade9468/kube-multus/1.log"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.644708 4708 generic.go:334] "Generic (PLEG): container finished" podID="7cedfe91-d1c3-4c56-9aac-797ecade9468" containerID="998f20f20c9b0feb9812819e21b6baa21d47ead8601fee9887b8830380f6a31b" exitCode=2
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.644821 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-f2fzr" event={"ID":"7cedfe91-d1c3-4c56-9aac-797ecade9468","Type":"ContainerDied","Data":"998f20f20c9b0feb9812819e21b6baa21d47ead8601fee9887b8830380f6a31b"}
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.644976 4708 scope.go:117] "RemoveContainer" containerID="0d4f0bd78f46aff839e5e3f84aab51a1734c1968d5d9f306b6175d0c4e21770e"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.645392 4708 scope.go:117] "RemoveContainer" containerID="998f20f20c9b0feb9812819e21b6baa21d47ead8601fee9887b8830380f6a31b"
Feb 03 07:21:05 crc kubenswrapper[4708]: E0203 07:21:05.645594 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-f2fzr_openshift-multus(7cedfe91-d1c3-4c56-9aac-797ecade9468)\"" pod="openshift-multus/multus-f2fzr" podUID="7cedfe91-d1c3-4c56-9aac-797ecade9468"
path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2sfqf_b0d14461-efec-4909-82de-2cce585892a4/ovn-controller/0.log" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.650274 4708 generic.go:334] "Generic (PLEG): container finished" podID="b0d14461-efec-4909-82de-2cce585892a4" containerID="fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99" exitCode=0 Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.650356 4708 generic.go:334] "Generic (PLEG): container finished" podID="b0d14461-efec-4909-82de-2cce585892a4" containerID="e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30" exitCode=0 Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.650409 4708 generic.go:334] "Generic (PLEG): container finished" podID="b0d14461-efec-4909-82de-2cce585892a4" containerID="ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59" exitCode=0 Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.650457 4708 generic.go:334] "Generic (PLEG): container finished" podID="b0d14461-efec-4909-82de-2cce585892a4" containerID="24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597" exitCode=0 Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.650502 4708 generic.go:334] "Generic (PLEG): container finished" podID="b0d14461-efec-4909-82de-2cce585892a4" containerID="7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53" exitCode=0 Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.650569 4708 generic.go:334] "Generic (PLEG): container finished" podID="b0d14461-efec-4909-82de-2cce585892a4" containerID="11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545" exitCode=0 Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.650627 4708 generic.go:334] "Generic (PLEG): container finished" podID="b0d14461-efec-4909-82de-2cce585892a4" containerID="3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1" exitCode=143 Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.650678 4708 generic.go:334] "Generic (PLEG): container finished" podID="b0d14461-efec-4909-82de-2cce585892a4" containerID="5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff" exitCode=143 Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.650740 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" event={"ID":"b0d14461-efec-4909-82de-2cce585892a4","Type":"ContainerDied","Data":"fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.650857 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" event={"ID":"b0d14461-efec-4909-82de-2cce585892a4","Type":"ContainerDied","Data":"e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.650923 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" event={"ID":"b0d14461-efec-4909-82de-2cce585892a4","Type":"ContainerDied","Data":"ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.650983 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" event={"ID":"b0d14461-efec-4909-82de-2cce585892a4","Type":"ContainerDied","Data":"24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.651040 4708 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" event={"ID":"b0d14461-efec-4909-82de-2cce585892a4","Type":"ContainerDied","Data":"7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.651093 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" event={"ID":"b0d14461-efec-4909-82de-2cce585892a4","Type":"ContainerDied","Data":"11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.651147 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.651196 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.651244 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.651311 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.651362 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.651406 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.651452 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.651495 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.651542 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.651592 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.651643 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" event={"ID":"b0d14461-efec-4909-82de-2cce585892a4","Type":"ContainerDied","Data":"3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.651694 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.651739 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.651812 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.651859 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.651913 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.651961 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.652008 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.652055 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.652100 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.652143 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.652194 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" event={"ID":"b0d14461-efec-4909-82de-2cce585892a4","Type":"ContainerDied","Data":"5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.652248 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.652294 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.652341 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.652397 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.655640 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.655761 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.655831 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.655883 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.655931 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.655978 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.656038 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" event={"ID":"b0d14461-efec-4909-82de-2cce585892a4","Type":"ContainerDied","Data":"b7ba1fa84452193eec5442ff36fb3097b8a7724c6051aefe0ff59d7d03f1bad7"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.656100 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.656154 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.656203 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.656248 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.656294 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.656347 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.656402 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.656460 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.656511 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.656559 4708 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8"} Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.654938 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-2sfqf" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.672584 4708 scope.go:117] "RemoveContainer" containerID="fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.680266 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-run-ovn-kubernetes\") pod \"b0d14461-efec-4909-82de-2cce585892a4\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.680319 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-run-netns\") pod \"b0d14461-efec-4909-82de-2cce585892a4\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.680375 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b0d14461-efec-4909-82de-2cce585892a4-env-overrides\") pod \"b0d14461-efec-4909-82de-2cce585892a4\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.680402 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-run-ovn\") pod \"b0d14461-efec-4909-82de-2cce585892a4\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.680429 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-node-log\") pod \"b0d14461-efec-4909-82de-2cce585892a4\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.680452 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b0d14461-efec-4909-82de-2cce585892a4-ovnkube-script-lib\") pod \"b0d14461-efec-4909-82de-2cce585892a4\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.680476 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: 
\"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-cni-netd\") pod \"b0d14461-efec-4909-82de-2cce585892a4\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.680495 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-slash\") pod \"b0d14461-efec-4909-82de-2cce585892a4\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.680521 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-run-openvswitch\") pod \"b0d14461-efec-4909-82de-2cce585892a4\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.680542 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b0d14461-efec-4909-82de-2cce585892a4-ovnkube-config\") pod \"b0d14461-efec-4909-82de-2cce585892a4\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.680561 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-var-lib-openvswitch\") pod \"b0d14461-efec-4909-82de-2cce585892a4\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.680585 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-etc-openvswitch\") pod \"b0d14461-efec-4909-82de-2cce585892a4\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.680608 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-run-systemd\") pod \"b0d14461-efec-4909-82de-2cce585892a4\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.680629 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-systemd-units\") pod \"b0d14461-efec-4909-82de-2cce585892a4\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.680658 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-cni-bin\") pod \"b0d14461-efec-4909-82de-2cce585892a4\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.680697 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qtfmh\" (UniqueName: \"kubernetes.io/projected/b0d14461-efec-4909-82de-2cce585892a4-kube-api-access-qtfmh\") pod \"b0d14461-efec-4909-82de-2cce585892a4\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.680718 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: 
\"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-kubelet\") pod \"b0d14461-efec-4909-82de-2cce585892a4\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.680745 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-log-socket\") pod \"b0d14461-efec-4909-82de-2cce585892a4\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.680767 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-var-lib-cni-networks-ovn-kubernetes\") pod \"b0d14461-efec-4909-82de-2cce585892a4\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.680784 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b0d14461-efec-4909-82de-2cce585892a4-ovn-node-metrics-cert\") pod \"b0d14461-efec-4909-82de-2cce585892a4\" (UID: \"b0d14461-efec-4909-82de-2cce585892a4\") " Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.681242 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "b0d14461-efec-4909-82de-2cce585892a4" (UID: "b0d14461-efec-4909-82de-2cce585892a4"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.681254 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "b0d14461-efec-4909-82de-2cce585892a4" (UID: "b0d14461-efec-4909-82de-2cce585892a4"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.681282 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "b0d14461-efec-4909-82de-2cce585892a4" (UID: "b0d14461-efec-4909-82de-2cce585892a4"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.681312 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "b0d14461-efec-4909-82de-2cce585892a4" (UID: "b0d14461-efec-4909-82de-2cce585892a4"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.681413 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "b0d14461-efec-4909-82de-2cce585892a4" (UID: "b0d14461-efec-4909-82de-2cce585892a4"). InnerVolumeSpecName "etc-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.681428 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "b0d14461-efec-4909-82de-2cce585892a4" (UID: "b0d14461-efec-4909-82de-2cce585892a4"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.681450 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "b0d14461-efec-4909-82de-2cce585892a4" (UID: "b0d14461-efec-4909-82de-2cce585892a4"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.681456 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "b0d14461-efec-4909-82de-2cce585892a4" (UID: "b0d14461-efec-4909-82de-2cce585892a4"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.681474 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-node-log" (OuterVolumeSpecName: "node-log") pod "b0d14461-efec-4909-82de-2cce585892a4" (UID: "b0d14461-efec-4909-82de-2cce585892a4"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.681479 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-slash" (OuterVolumeSpecName: "host-slash") pod "b0d14461-efec-4909-82de-2cce585892a4" (UID: "b0d14461-efec-4909-82de-2cce585892a4"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.681504 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "b0d14461-efec-4909-82de-2cce585892a4" (UID: "b0d14461-efec-4909-82de-2cce585892a4"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.681522 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "b0d14461-efec-4909-82de-2cce585892a4" (UID: "b0d14461-efec-4909-82de-2cce585892a4"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.681545 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-log-socket" (OuterVolumeSpecName: "log-socket") pod "b0d14461-efec-4909-82de-2cce585892a4" (UID: "b0d14461-efec-4909-82de-2cce585892a4"). InnerVolumeSpecName "log-socket". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.681633 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "b0d14461-efec-4909-82de-2cce585892a4" (UID: "b0d14461-efec-4909-82de-2cce585892a4"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.681833 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b0d14461-efec-4909-82de-2cce585892a4-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "b0d14461-efec-4909-82de-2cce585892a4" (UID: "b0d14461-efec-4909-82de-2cce585892a4"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.681858 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b0d14461-efec-4909-82de-2cce585892a4-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "b0d14461-efec-4909-82de-2cce585892a4" (UID: "b0d14461-efec-4909-82de-2cce585892a4"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.681980 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b0d14461-efec-4909-82de-2cce585892a4-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "b0d14461-efec-4909-82de-2cce585892a4" (UID: "b0d14461-efec-4909-82de-2cce585892a4"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.688016 4708 scope.go:117] "RemoveContainer" containerID="cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.688696 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0d14461-efec-4909-82de-2cce585892a4-kube-api-access-qtfmh" (OuterVolumeSpecName: "kube-api-access-qtfmh") pod "b0d14461-efec-4909-82de-2cce585892a4" (UID: "b0d14461-efec-4909-82de-2cce585892a4"). InnerVolumeSpecName "kube-api-access-qtfmh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.692432 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4wqqx"] Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.692474 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0d14461-efec-4909-82de-2cce585892a4-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "b0d14461-efec-4909-82de-2cce585892a4" (UID: "b0d14461-efec-4909-82de-2cce585892a4"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:21:05 crc kubenswrapper[4708]: E0203 07:21:05.692662 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="kube-rbac-proxy-ovn-metrics" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.692675 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="kube-rbac-proxy-ovn-metrics" Feb 03 07:21:05 crc kubenswrapper[4708]: E0203 07:21:05.692686 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="sbdb" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.692692 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="sbdb" Feb 03 07:21:05 crc kubenswrapper[4708]: E0203 07:21:05.692920 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="ovnkube-controller" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.692934 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="ovnkube-controller" Feb 03 07:21:05 crc kubenswrapper[4708]: E0203 07:21:05.692943 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="ovnkube-controller" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.692949 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="ovnkube-controller" Feb 03 07:21:05 crc kubenswrapper[4708]: E0203 07:21:05.692967 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="ovnkube-controller" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.692973 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="ovnkube-controller" Feb 03 07:21:05 crc kubenswrapper[4708]: E0203 07:21:05.692984 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="ovn-acl-logging" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.692990 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="ovn-acl-logging" Feb 03 07:21:05 crc kubenswrapper[4708]: E0203 07:21:05.692999 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="northd" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.693005 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="northd" Feb 03 07:21:05 crc kubenswrapper[4708]: E0203 07:21:05.693015 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="kube-rbac-proxy-node" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.693022 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="kube-rbac-proxy-node" Feb 03 07:21:05 crc kubenswrapper[4708]: E0203 07:21:05.693030 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="nbdb" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.693035 4708 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="nbdb" Feb 03 07:21:05 crc kubenswrapper[4708]: E0203 07:21:05.693043 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="kubecfg-setup" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.693049 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="kubecfg-setup" Feb 03 07:21:05 crc kubenswrapper[4708]: E0203 07:21:05.693059 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="ovn-controller" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.693066 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="ovn-controller" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.693153 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="kube-rbac-proxy-ovn-metrics" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.693160 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="ovnkube-controller" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.693167 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="ovn-controller" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.693176 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="northd" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.693184 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="kube-rbac-proxy-node" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.693192 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="nbdb" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.693198 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="ovnkube-controller" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.693205 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="sbdb" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.693213 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="ovn-acl-logging" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.693222 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="ovnkube-controller" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.693230 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="ovnkube-controller" Feb 03 07:21:05 crc kubenswrapper[4708]: E0203 07:21:05.693320 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="ovnkube-controller" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.693327 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="ovnkube-controller" Feb 03 07:21:05 crc kubenswrapper[4708]: E0203 07:21:05.693334 
4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="ovnkube-controller" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.693340 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="ovnkube-controller" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.693450 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0d14461-efec-4909-82de-2cce585892a4" containerName="ovnkube-controller" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.695151 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.695479 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "b0d14461-efec-4909-82de-2cce585892a4" (UID: "b0d14461-efec-4909-82de-2cce585892a4"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.711076 4708 scope.go:117] "RemoveContainer" containerID="e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.721978 4708 scope.go:117] "RemoveContainer" containerID="ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.737546 4708 scope.go:117] "RemoveContainer" containerID="24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.749348 4708 scope.go:117] "RemoveContainer" containerID="7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.761680 4708 scope.go:117] "RemoveContainer" containerID="11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.772999 4708 scope.go:117] "RemoveContainer" containerID="3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.782201 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-host-cni-netd\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.782275 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-host-run-ovn-kubernetes\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.782376 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-node-log\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.782406 4708 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.782454 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-host-run-netns\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.782475 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-host-kubelet\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.782494 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/759763c9-ddf1-468d-a2f5-73c98d5a38f4-ovnkube-config\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.782517 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-run-ovn\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.782627 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-systemd-units\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.782674 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-log-socket\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.782723 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/759763c9-ddf1-468d-a2f5-73c98d5a38f4-ovn-node-metrics-cert\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.782754 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-run-openvswitch\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.782808 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-host-cni-bin\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.782832 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-host-slash\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.782854 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfnkg\" (UniqueName: \"kubernetes.io/projected/759763c9-ddf1-468d-a2f5-73c98d5a38f4-kube-api-access-lfnkg\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.782870 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-var-lib-openvswitch\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.782891 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-etc-openvswitch\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.782914 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/759763c9-ddf1-468d-a2f5-73c98d5a38f4-env-overrides\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.782931 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/759763c9-ddf1-468d-a2f5-73c98d5a38f4-ovnkube-script-lib\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.782954 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-run-systemd\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.783017 4708 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-run-netns\") on node \"crc\" DevicePath 
\"\"" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.783028 4708 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b0d14461-efec-4909-82de-2cce585892a4-env-overrides\") on node \"crc\" DevicePath \"\"" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.783038 4708 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-run-ovn\") on node \"crc\" DevicePath \"\"" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.783046 4708 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-node-log\") on node \"crc\" DevicePath \"\"" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.783055 4708 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b0d14461-efec-4909-82de-2cce585892a4-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.783065 4708 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-cni-netd\") on node \"crc\" DevicePath \"\"" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.783074 4708 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-slash\") on node \"crc\" DevicePath \"\"" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.783082 4708 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-run-openvswitch\") on node \"crc\" DevicePath \"\"" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.783091 4708 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b0d14461-efec-4909-82de-2cce585892a4-ovnkube-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.783099 4708 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.783107 4708 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.783115 4708 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-run-systemd\") on node \"crc\" DevicePath \"\"" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.783124 4708 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-systemd-units\") on node \"crc\" DevicePath \"\"" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.783132 4708 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-cni-bin\") on node \"crc\" DevicePath \"\"" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.783141 4708 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qtfmh\" (UniqueName: \"kubernetes.io/projected/b0d14461-efec-4909-82de-2cce585892a4-kube-api-access-qtfmh\") on node \"crc\" DevicePath \"\"" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.783150 4708 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-kubelet\") on node \"crc\" DevicePath \"\"" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.783157 4708 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-log-socket\") on node \"crc\" DevicePath \"\"" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.783165 4708 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.783173 4708 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b0d14461-efec-4909-82de-2cce585892a4-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.783183 4708 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b0d14461-efec-4909-82de-2cce585892a4-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.788414 4708 scope.go:117] "RemoveContainer" containerID="5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.801987 4708 scope.go:117] "RemoveContainer" containerID="3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.815937 4708 scope.go:117] "RemoveContainer" containerID="fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99" Feb 03 07:21:05 crc kubenswrapper[4708]: E0203 07:21:05.816330 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99\": container with ID starting with fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99 not found: ID does not exist" containerID="fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.816366 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99"} err="failed to get container status \"fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99\": rpc error: code = NotFound desc = could not find container \"fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99\": container with ID starting with fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99 not found: ID does not exist" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.816387 4708 scope.go:117] "RemoveContainer" containerID="cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1" Feb 03 07:21:05 crc kubenswrapper[4708]: E0203 07:21:05.816845 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound 
desc = could not find container \"cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1\": container with ID starting with cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1 not found: ID does not exist" containerID="cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.816899 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1"} err="failed to get container status \"cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1\": rpc error: code = NotFound desc = could not find container \"cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1\": container with ID starting with cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1 not found: ID does not exist" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.816937 4708 scope.go:117] "RemoveContainer" containerID="e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30" Feb 03 07:21:05 crc kubenswrapper[4708]: E0203 07:21:05.817328 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\": container with ID starting with e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30 not found: ID does not exist" containerID="e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.817377 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30"} err="failed to get container status \"e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\": rpc error: code = NotFound desc = could not find container \"e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\": container with ID starting with e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30 not found: ID does not exist" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.817393 4708 scope.go:117] "RemoveContainer" containerID="ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59" Feb 03 07:21:05 crc kubenswrapper[4708]: E0203 07:21:05.817953 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\": container with ID starting with ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59 not found: ID does not exist" containerID="ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.817980 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59"} err="failed to get container status \"ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\": rpc error: code = NotFound desc = could not find container \"ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\": container with ID starting with ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59 not found: ID does not exist" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.817998 4708 scope.go:117] "RemoveContainer" containerID="24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597" 
Feb 03 07:21:05 crc kubenswrapper[4708]: E0203 07:21:05.818293 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\": container with ID starting with 24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597 not found: ID does not exist" containerID="24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.818338 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597"} err="failed to get container status \"24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\": rpc error: code = NotFound desc = could not find container \"24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\": container with ID starting with 24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.818366 4708 scope.go:117] "RemoveContainer" containerID="7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53"
Feb 03 07:21:05 crc kubenswrapper[4708]: E0203 07:21:05.819452 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\": container with ID starting with 7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53 not found: ID does not exist" containerID="7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.819478 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53"} err="failed to get container status \"7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\": rpc error: code = NotFound desc = could not find container \"7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\": container with ID starting with 7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.819492 4708 scope.go:117] "RemoveContainer" containerID="11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545"
Feb 03 07:21:05 crc kubenswrapper[4708]: E0203 07:21:05.819827 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\": container with ID starting with 11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545 not found: ID does not exist" containerID="11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.819872 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545"} err="failed to get container status \"11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\": rpc error: code = NotFound desc = could not find container \"11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\": container with ID starting with 11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.819976 4708 scope.go:117] "RemoveContainer" containerID="3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1"
Feb 03 07:21:05 crc kubenswrapper[4708]: E0203 07:21:05.820306 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\": container with ID starting with 3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1 not found: ID does not exist" containerID="3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.820365 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1"} err="failed to get container status \"3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\": rpc error: code = NotFound desc = could not find container \"3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\": container with ID starting with 3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.820387 4708 scope.go:117] "RemoveContainer" containerID="5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff"
Feb 03 07:21:05 crc kubenswrapper[4708]: E0203 07:21:05.820699 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\": container with ID starting with 5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff not found: ID does not exist" containerID="5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.820730 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff"} err="failed to get container status \"5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\": rpc error: code = NotFound desc = could not find container \"5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\": container with ID starting with 5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.820749 4708 scope.go:117] "RemoveContainer" containerID="3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8"
Feb 03 07:21:05 crc kubenswrapper[4708]: E0203 07:21:05.821020 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\": container with ID starting with 3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8 not found: ID does not exist" containerID="3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.821062 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8"} err="failed to get container status \"3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\": rpc error: code = NotFound desc = could not find container \"3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\": container with ID starting with 3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.821082 4708 scope.go:117] "RemoveContainer" containerID="fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.821935 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99"} err="failed to get container status \"fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99\": rpc error: code = NotFound desc = could not find container \"fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99\": container with ID starting with fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.821957 4708 scope.go:117] "RemoveContainer" containerID="cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.822196 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1"} err="failed to get container status \"cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1\": rpc error: code = NotFound desc = could not find container \"cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1\": container with ID starting with cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.822216 4708 scope.go:117] "RemoveContainer" containerID="e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.822468 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30"} err="failed to get container status \"e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\": rpc error: code = NotFound desc = could not find container \"e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\": container with ID starting with e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.822491 4708 scope.go:117] "RemoveContainer" containerID="ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.822735 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59"} err="failed to get container status \"ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\": rpc error: code = NotFound desc = could not find container \"ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\": container with ID starting with ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.822754 4708 scope.go:117] "RemoveContainer" containerID="24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.822992 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597"} err="failed to get container status \"24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\": rpc error: code = NotFound desc = could not find container \"24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\": container with ID starting with 24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.823044 4708 scope.go:117] "RemoveContainer" containerID="7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.823324 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53"} err="failed to get container status \"7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\": rpc error: code = NotFound desc = could not find container \"7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\": container with ID starting with 7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.823364 4708 scope.go:117] "RemoveContainer" containerID="11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.823634 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545"} err="failed to get container status \"11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\": rpc error: code = NotFound desc = could not find container \"11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\": container with ID starting with 11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.823663 4708 scope.go:117] "RemoveContainer" containerID="3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.824003 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1"} err="failed to get container status \"3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\": rpc error: code = NotFound desc = could not find container \"3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\": container with ID starting with 3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.824026 4708 scope.go:117] "RemoveContainer" containerID="5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.824244 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff"} err="failed to get container status \"5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\": rpc error: code = NotFound desc = could not find container \"5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\": container with ID starting with 5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.824268 4708 scope.go:117] "RemoveContainer" containerID="3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.824508 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8"} err="failed to get container status \"3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\": rpc error: code = NotFound desc = could not find container \"3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\": container with ID starting with 3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.824552 4708 scope.go:117] "RemoveContainer" containerID="fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.824842 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99"} err="failed to get container status \"fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99\": rpc error: code = NotFound desc = could not find container \"fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99\": container with ID starting with fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.824869 4708 scope.go:117] "RemoveContainer" containerID="cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.825082 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1"} err="failed to get container status \"cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1\": rpc error: code = NotFound desc = could not find container \"cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1\": container with ID starting with cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.825111 4708 scope.go:117] "RemoveContainer" containerID="e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.825273 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30"} err="failed to get container status \"e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\": rpc error: code = NotFound desc = could not find container \"e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\": container with ID starting with e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.825297 4708 scope.go:117] "RemoveContainer" containerID="ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.825504 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59"} err="failed to get container status \"ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\": rpc error: code = NotFound desc = could not find container \"ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\": container with ID starting with ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.825523 4708 scope.go:117] "RemoveContainer" containerID="24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.826224 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597"} err="failed to get container status \"24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\": rpc error: code = NotFound desc = could not find container \"24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\": container with ID starting with 24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.826250 4708 scope.go:117] "RemoveContainer" containerID="7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.826462 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53"} err="failed to get container status \"7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\": rpc error: code = NotFound desc = could not find container \"7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\": container with ID starting with 7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.826481 4708 scope.go:117] "RemoveContainer" containerID="11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.826759 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545"} err="failed to get container status \"11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\": rpc error: code = NotFound desc = could not find container \"11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\": container with ID starting with 11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.826780 4708 scope.go:117] "RemoveContainer" containerID="3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.827038 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1"} err="failed to get container status \"3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\": rpc error: code = NotFound desc = could not find container \"3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\": container with ID starting with 3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.827064 4708 scope.go:117] "RemoveContainer" containerID="5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.827319 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff"} err="failed to get container status \"5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\": rpc error: code = NotFound desc = could not find container \"5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\": container with ID starting with 5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.827342 4708 scope.go:117] "RemoveContainer" containerID="3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.827546 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8"} err="failed to get container status \"3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\": rpc error: code = NotFound desc = could not find container \"3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\": container with ID starting with 3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.827573 4708 scope.go:117] "RemoveContainer" containerID="fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.827778 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99"} err="failed to get container status \"fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99\": rpc error: code = NotFound desc = could not find container \"fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99\": container with ID starting with fe5bf6c2cf221268f63d1d71973d9c953a824b46302ceadab2c5ed65511bec99 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.827814 4708 scope.go:117] "RemoveContainer" containerID="cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.828005 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1"} err="failed to get container status \"cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1\": rpc error: code = NotFound desc = could not find container \"cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1\": container with ID starting with cee5533553f2ddf0222983a0fd77f0affe7665d9969cc06eaaade87a87e8f0f1 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.828033 4708 scope.go:117] "RemoveContainer" containerID="e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.828272 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30"} err="failed to get container status \"e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\": rpc error: code = NotFound desc = could not find container \"e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30\": container with ID starting with e9a2141b711f6aea0a487147e07f7cb21a1c03fc2ceee43fee272b38a9397a30 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.828298 4708 scope.go:117] "RemoveContainer" containerID="ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.828500 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59"} err="failed to get container status \"ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\": rpc error: code = NotFound desc = could not find container \"ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59\": container with ID starting with ee24c4024dcac35ad12edae904f68c5acb8b6f8ce3a12d20dedb729626902c59 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.828518 4708 scope.go:117] "RemoveContainer" containerID="24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.828722 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597"} err="failed to get container status \"24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\": rpc error: code = NotFound desc = could not find container \"24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597\": container with ID starting with 24460397989e4adb94540c34527c9cbde802c4a1cd4b6d3521784d8b98199597 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.828748 4708 scope.go:117] "RemoveContainer" containerID="7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.828959 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53"} err="failed to get container status \"7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\": rpc error: code = NotFound desc = could not find container \"7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53\": container with ID starting with 7f17633faeea1d7a9bb8572946ccf70aac0865fcb1ffaf5c58ef47dbe4507f53 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.828989 4708 scope.go:117] "RemoveContainer" containerID="11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.829331 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545"} err="failed to get container status \"11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\": rpc error: code = NotFound desc = could not find container \"11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545\": container with ID starting with 11837fe220df25f4bc721de530c1241ee9d2b7fb81d5aa8ffea9042a752f1545 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.829360 4708 scope.go:117] "RemoveContainer" containerID="3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.829602 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1"} err="failed to get container status \"3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\": rpc error: code = NotFound desc = could not find container \"3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1\": container with ID starting with 3e793830fe467778707147f1fc77ddf2b469cf2a3cf54ff5eb30f54b0acdded1 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.829627 4708 scope.go:117] "RemoveContainer" containerID="5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.830236 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff"} err="failed to get container status \"5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\": rpc error: code = NotFound desc = could not find container \"5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff\": container with ID starting with 5003dd7cf713e38400fcf14e3be5a69ced01cfb4f6f53c4672a76692b3c2efff not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.830267 4708 scope.go:117] "RemoveContainer" containerID="3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.830513 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8"} err="failed to get container status \"3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\": rpc error: code = NotFound desc = could not find container \"3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8\": container with ID starting with 3f639079817e4a7045e0246459b94333e894639022d4aa34b09c6df410bac6b8 not found: ID does not exist"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.884425 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-host-run-ovn-kubernetes\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.884497 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-node-log\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.884538 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx"
Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.884568 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for
volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-host-run-ovn-kubernetes\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.884640 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-host-run-netns\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.884582 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-host-run-netns\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.884673 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-node-log\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.884709 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-host-kubelet\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.884737 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.884750 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/759763c9-ddf1-468d-a2f5-73c98d5a38f4-ovnkube-config\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.884818 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-run-ovn\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.884856 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-systemd-units\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.884925 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: 
\"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-log-socket\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.884966 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/759763c9-ddf1-468d-a2f5-73c98d5a38f4-ovn-node-metrics-cert\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.885000 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-run-openvswitch\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.885041 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-host-cni-bin\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.885085 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-host-slash\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.885127 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfnkg\" (UniqueName: \"kubernetes.io/projected/759763c9-ddf1-468d-a2f5-73c98d5a38f4-kube-api-access-lfnkg\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.885159 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-var-lib-openvswitch\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.885211 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-etc-openvswitch\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.885251 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/759763c9-ddf1-468d-a2f5-73c98d5a38f4-env-overrides\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.885282 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: 
\"kubernetes.io/configmap/759763c9-ddf1-468d-a2f5-73c98d5a38f4-ovnkube-script-lib\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.885328 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-run-systemd\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.885389 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-host-cni-netd\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.885494 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-host-cni-netd\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.885520 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/759763c9-ddf1-468d-a2f5-73c98d5a38f4-ovnkube-config\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.885547 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-run-ovn\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.885568 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-host-slash\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.884820 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-host-kubelet\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.885607 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-systemd-units\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.885632 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-log-socket\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.885940 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-var-lib-openvswitch\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.885981 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-run-systemd\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.886005 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-host-cni-bin\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.886014 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-run-openvswitch\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.886094 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/759763c9-ddf1-468d-a2f5-73c98d5a38f4-etc-openvswitch\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.886476 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/759763c9-ddf1-468d-a2f5-73c98d5a38f4-env-overrides\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.886514 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/759763c9-ddf1-468d-a2f5-73c98d5a38f4-ovnkube-script-lib\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.892141 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/759763c9-ddf1-468d-a2f5-73c98d5a38f4-ovn-node-metrics-cert\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.902410 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfnkg\" (UniqueName: \"kubernetes.io/projected/759763c9-ddf1-468d-a2f5-73c98d5a38f4-kube-api-access-lfnkg\") pod \"ovnkube-node-4wqqx\" (UID: \"759763c9-ddf1-468d-a2f5-73c98d5a38f4\") " pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.989038 4708 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-2sfqf"] Feb 03 07:21:05 crc kubenswrapper[4708]: I0203 07:21:05.994448 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-2sfqf"] Feb 03 07:21:06 crc kubenswrapper[4708]: I0203 07:21:06.013306 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:06 crc kubenswrapper[4708]: W0203 07:21:06.034279 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod759763c9_ddf1_468d_a2f5_73c98d5a38f4.slice/crio-aef67c55e4a493497e32d1313a181f8d6aef5dddfa3b617755fbd26d131aeefe WatchSource:0}: Error finding container aef67c55e4a493497e32d1313a181f8d6aef5dddfa3b617755fbd26d131aeefe: Status 404 returned error can't find the container with id aef67c55e4a493497e32d1313a181f8d6aef5dddfa3b617755fbd26d131aeefe Feb 03 07:21:06 crc kubenswrapper[4708]: I0203 07:21:06.105351 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0d14461-efec-4909-82de-2cce585892a4" path="/var/lib/kubelet/pods/b0d14461-efec-4909-82de-2cce585892a4/volumes" Feb 03 07:21:06 crc kubenswrapper[4708]: I0203 07:21:06.309748 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-687f57d79b-j4njw" Feb 03 07:21:06 crc kubenswrapper[4708]: I0203 07:21:06.658770 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-f2fzr_7cedfe91-d1c3-4c56-9aac-797ecade9468/kube-multus/2.log" Feb 03 07:21:06 crc kubenswrapper[4708]: I0203 07:21:06.661135 4708 generic.go:334] "Generic (PLEG): container finished" podID="759763c9-ddf1-468d-a2f5-73c98d5a38f4" containerID="ee03c8fedec1c82d220bf905f1bb6f98116a20560c09d3d3ffb29e7886004a5a" exitCode=0 Feb 03 07:21:06 crc kubenswrapper[4708]: I0203 07:21:06.661203 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" event={"ID":"759763c9-ddf1-468d-a2f5-73c98d5a38f4","Type":"ContainerDied","Data":"ee03c8fedec1c82d220bf905f1bb6f98116a20560c09d3d3ffb29e7886004a5a"} Feb 03 07:21:06 crc kubenswrapper[4708]: I0203 07:21:06.661241 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" event={"ID":"759763c9-ddf1-468d-a2f5-73c98d5a38f4","Type":"ContainerStarted","Data":"aef67c55e4a493497e32d1313a181f8d6aef5dddfa3b617755fbd26d131aeefe"} Feb 03 07:21:07 crc kubenswrapper[4708]: I0203 07:21:07.676102 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" event={"ID":"759763c9-ddf1-468d-a2f5-73c98d5a38f4","Type":"ContainerStarted","Data":"99a7824286e6b604b653045c0f2234e55306d0dd4506f252fd39d0e1885ebf84"} Feb 03 07:21:07 crc kubenswrapper[4708]: I0203 07:21:07.676442 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" event={"ID":"759763c9-ddf1-468d-a2f5-73c98d5a38f4","Type":"ContainerStarted","Data":"cc244327fd7979a1008a56ed83669a26dd13e951ecdf312571a99aecc6187f2c"} Feb 03 07:21:07 crc kubenswrapper[4708]: I0203 07:21:07.676457 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" event={"ID":"759763c9-ddf1-468d-a2f5-73c98d5a38f4","Type":"ContainerStarted","Data":"39f193173caf113590cfd6ff1cae45a85f6681ef2042062180785ea1ce53a40f"} Feb 03 07:21:07 crc kubenswrapper[4708]: I0203 07:21:07.676470 
4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" event={"ID":"759763c9-ddf1-468d-a2f5-73c98d5a38f4","Type":"ContainerStarted","Data":"185853ff20a08d09033017c31dee1e54eb8e988eae2dc6677600a13625cd0471"} Feb 03 07:21:07 crc kubenswrapper[4708]: I0203 07:21:07.676482 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" event={"ID":"759763c9-ddf1-468d-a2f5-73c98d5a38f4","Type":"ContainerStarted","Data":"0133bc390bad727588e9a96add0d256082475686a9b074091a9e1ea054f0ee55"} Feb 03 07:21:07 crc kubenswrapper[4708]: I0203 07:21:07.676494 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" event={"ID":"759763c9-ddf1-468d-a2f5-73c98d5a38f4","Type":"ContainerStarted","Data":"60c1eea0e4724fde00bb405505be0fa0e63ec221d3f3b4a813bad3967836c266"} Feb 03 07:21:10 crc kubenswrapper[4708]: I0203 07:21:10.710315 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" event={"ID":"759763c9-ddf1-468d-a2f5-73c98d5a38f4","Type":"ContainerStarted","Data":"0afebf7cdab0c7998e1d4b2f021a0f7d1aed65fa78f07b38012843c770d95997"} Feb 03 07:21:12 crc kubenswrapper[4708]: I0203 07:21:12.727295 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" event={"ID":"759763c9-ddf1-468d-a2f5-73c98d5a38f4","Type":"ContainerStarted","Data":"41695d5c68cd7e49ab6b4a56b6a7387efad39615df8b23cd48b0cacb39c61876"} Feb 03 07:21:12 crc kubenswrapper[4708]: I0203 07:21:12.727972 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:12 crc kubenswrapper[4708]: I0203 07:21:12.727987 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:12 crc kubenswrapper[4708]: I0203 07:21:12.727998 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:12 crc kubenswrapper[4708]: I0203 07:21:12.758289 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:12 crc kubenswrapper[4708]: I0203 07:21:12.758639 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:12 crc kubenswrapper[4708]: I0203 07:21:12.770189 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" podStartSLOduration=7.7701606210000005 podStartE2EDuration="7.770160621s" podCreationTimestamp="2026-02-03 07:21:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:21:12.761043759 +0000 UTC m=+651.742990586" watchObservedRunningTime="2026-02-03 07:21:12.770160621 +0000 UTC m=+651.752107468" Feb 03 07:21:21 crc kubenswrapper[4708]: I0203 07:21:21.093033 4708 scope.go:117] "RemoveContainer" containerID="998f20f20c9b0feb9812819e21b6baa21d47ead8601fee9887b8830380f6a31b" Feb 03 07:21:21 crc kubenswrapper[4708]: E0203 07:21:21.094231 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-f2fzr_openshift-multus(7cedfe91-d1c3-4c56-9aac-797ecade9468)\"" 
pod="openshift-multus/multus-f2fzr" podUID="7cedfe91-d1c3-4c56-9aac-797ecade9468" Feb 03 07:21:32 crc kubenswrapper[4708]: I0203 07:21:32.104775 4708 scope.go:117] "RemoveContainer" containerID="998f20f20c9b0feb9812819e21b6baa21d47ead8601fee9887b8830380f6a31b" Feb 03 07:21:32 crc kubenswrapper[4708]: I0203 07:21:32.869932 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-f2fzr_7cedfe91-d1c3-4c56-9aac-797ecade9468/kube-multus/2.log" Feb 03 07:21:32 crc kubenswrapper[4708]: I0203 07:21:32.870257 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-f2fzr" event={"ID":"7cedfe91-d1c3-4c56-9aac-797ecade9468","Type":"ContainerStarted","Data":"6c4cf0ef2416fab1970ebe315b8c494a60bede135f38c941a86d6a73264d3628"} Feb 03 07:21:36 crc kubenswrapper[4708]: I0203 07:21:36.056554 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4wqqx" Feb 03 07:21:43 crc kubenswrapper[4708]: I0203 07:21:43.617990 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n"] Feb 03 07:21:43 crc kubenswrapper[4708]: I0203 07:21:43.620746 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n" Feb 03 07:21:43 crc kubenswrapper[4708]: I0203 07:21:43.624008 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Feb 03 07:21:43 crc kubenswrapper[4708]: I0203 07:21:43.631031 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n"] Feb 03 07:21:43 crc kubenswrapper[4708]: I0203 07:21:43.770013 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/20d02b2d-b83b-4dcf-ac9d-bffece1d430c-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n\" (UID: \"20d02b2d-b83b-4dcf-ac9d-bffece1d430c\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n" Feb 03 07:21:43 crc kubenswrapper[4708]: I0203 07:21:43.770129 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/20d02b2d-b83b-4dcf-ac9d-bffece1d430c-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n\" (UID: \"20d02b2d-b83b-4dcf-ac9d-bffece1d430c\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n" Feb 03 07:21:43 crc kubenswrapper[4708]: I0203 07:21:43.770343 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w552w\" (UniqueName: \"kubernetes.io/projected/20d02b2d-b83b-4dcf-ac9d-bffece1d430c-kube-api-access-w552w\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n\" (UID: \"20d02b2d-b83b-4dcf-ac9d-bffece1d430c\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n" Feb 03 07:21:43 crc kubenswrapper[4708]: I0203 07:21:43.871738 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w552w\" (UniqueName: \"kubernetes.io/projected/20d02b2d-b83b-4dcf-ac9d-bffece1d430c-kube-api-access-w552w\") pod 
\"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n\" (UID: \"20d02b2d-b83b-4dcf-ac9d-bffece1d430c\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n" Feb 03 07:21:43 crc kubenswrapper[4708]: I0203 07:21:43.872367 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/20d02b2d-b83b-4dcf-ac9d-bffece1d430c-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n\" (UID: \"20d02b2d-b83b-4dcf-ac9d-bffece1d430c\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n" Feb 03 07:21:43 crc kubenswrapper[4708]: I0203 07:21:43.872446 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/20d02b2d-b83b-4dcf-ac9d-bffece1d430c-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n\" (UID: \"20d02b2d-b83b-4dcf-ac9d-bffece1d430c\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n" Feb 03 07:21:43 crc kubenswrapper[4708]: I0203 07:21:43.873199 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/20d02b2d-b83b-4dcf-ac9d-bffece1d430c-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n\" (UID: \"20d02b2d-b83b-4dcf-ac9d-bffece1d430c\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n" Feb 03 07:21:43 crc kubenswrapper[4708]: I0203 07:21:43.873316 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/20d02b2d-b83b-4dcf-ac9d-bffece1d430c-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n\" (UID: \"20d02b2d-b83b-4dcf-ac9d-bffece1d430c\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n" Feb 03 07:21:43 crc kubenswrapper[4708]: I0203 07:21:43.904998 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w552w\" (UniqueName: \"kubernetes.io/projected/20d02b2d-b83b-4dcf-ac9d-bffece1d430c-kube-api-access-w552w\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n\" (UID: \"20d02b2d-b83b-4dcf-ac9d-bffece1d430c\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n" Feb 03 07:21:43 crc kubenswrapper[4708]: I0203 07:21:43.941388 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n" Feb 03 07:21:44 crc kubenswrapper[4708]: I0203 07:21:44.429321 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n"] Feb 03 07:21:44 crc kubenswrapper[4708]: I0203 07:21:44.939750 4708 generic.go:334] "Generic (PLEG): container finished" podID="20d02b2d-b83b-4dcf-ac9d-bffece1d430c" containerID="411a13f55a1f00e031efa157f45c0a69f91b6293d8c5a0e34d29005d551b1cc3" exitCode=0 Feb 03 07:21:44 crc kubenswrapper[4708]: I0203 07:21:44.939788 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n" event={"ID":"20d02b2d-b83b-4dcf-ac9d-bffece1d430c","Type":"ContainerDied","Data":"411a13f55a1f00e031efa157f45c0a69f91b6293d8c5a0e34d29005d551b1cc3"} Feb 03 07:21:44 crc kubenswrapper[4708]: I0203 07:21:44.939829 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n" event={"ID":"20d02b2d-b83b-4dcf-ac9d-bffece1d430c","Type":"ContainerStarted","Data":"2d88787208e5cce71f386d6547e15e2afaca6dd16a3b55f371db7439a600f97d"} Feb 03 07:21:46 crc kubenswrapper[4708]: I0203 07:21:46.957491 4708 generic.go:334] "Generic (PLEG): container finished" podID="20d02b2d-b83b-4dcf-ac9d-bffece1d430c" containerID="a03f94038a2cfea87bceb83e96a363d9bf781ddf9056ff6955b5d1bf5cf8d1b2" exitCode=0 Feb 03 07:21:46 crc kubenswrapper[4708]: I0203 07:21:46.957570 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n" event={"ID":"20d02b2d-b83b-4dcf-ac9d-bffece1d430c","Type":"ContainerDied","Data":"a03f94038a2cfea87bceb83e96a363d9bf781ddf9056ff6955b5d1bf5cf8d1b2"} Feb 03 07:21:47 crc kubenswrapper[4708]: I0203 07:21:47.970251 4708 generic.go:334] "Generic (PLEG): container finished" podID="20d02b2d-b83b-4dcf-ac9d-bffece1d430c" containerID="aafce46a8c84c472fc6e6e1c0490a184b4d921285bcfc0013ecc4719fdd0f231" exitCode=0 Feb 03 07:21:47 crc kubenswrapper[4708]: I0203 07:21:47.970331 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n" event={"ID":"20d02b2d-b83b-4dcf-ac9d-bffece1d430c","Type":"ContainerDied","Data":"aafce46a8c84c472fc6e6e1c0490a184b4d921285bcfc0013ecc4719fdd0f231"} Feb 03 07:21:49 crc kubenswrapper[4708]: I0203 07:21:49.309345 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n" Feb 03 07:21:49 crc kubenswrapper[4708]: I0203 07:21:49.342136 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/20d02b2d-b83b-4dcf-ac9d-bffece1d430c-util\") pod \"20d02b2d-b83b-4dcf-ac9d-bffece1d430c\" (UID: \"20d02b2d-b83b-4dcf-ac9d-bffece1d430c\") " Feb 03 07:21:49 crc kubenswrapper[4708]: I0203 07:21:49.342213 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/20d02b2d-b83b-4dcf-ac9d-bffece1d430c-bundle\") pod \"20d02b2d-b83b-4dcf-ac9d-bffece1d430c\" (UID: \"20d02b2d-b83b-4dcf-ac9d-bffece1d430c\") " Feb 03 07:21:49 crc kubenswrapper[4708]: I0203 07:21:49.342330 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w552w\" (UniqueName: \"kubernetes.io/projected/20d02b2d-b83b-4dcf-ac9d-bffece1d430c-kube-api-access-w552w\") pod \"20d02b2d-b83b-4dcf-ac9d-bffece1d430c\" (UID: \"20d02b2d-b83b-4dcf-ac9d-bffece1d430c\") " Feb 03 07:21:49 crc kubenswrapper[4708]: I0203 07:21:49.343079 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20d02b2d-b83b-4dcf-ac9d-bffece1d430c-bundle" (OuterVolumeSpecName: "bundle") pod "20d02b2d-b83b-4dcf-ac9d-bffece1d430c" (UID: "20d02b2d-b83b-4dcf-ac9d-bffece1d430c"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:21:49 crc kubenswrapper[4708]: I0203 07:21:49.347978 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20d02b2d-b83b-4dcf-ac9d-bffece1d430c-kube-api-access-w552w" (OuterVolumeSpecName: "kube-api-access-w552w") pod "20d02b2d-b83b-4dcf-ac9d-bffece1d430c" (UID: "20d02b2d-b83b-4dcf-ac9d-bffece1d430c"). InnerVolumeSpecName "kube-api-access-w552w". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:21:49 crc kubenswrapper[4708]: I0203 07:21:49.361110 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20d02b2d-b83b-4dcf-ac9d-bffece1d430c-util" (OuterVolumeSpecName: "util") pod "20d02b2d-b83b-4dcf-ac9d-bffece1d430c" (UID: "20d02b2d-b83b-4dcf-ac9d-bffece1d430c"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:21:49 crc kubenswrapper[4708]: I0203 07:21:49.443407 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w552w\" (UniqueName: \"kubernetes.io/projected/20d02b2d-b83b-4dcf-ac9d-bffece1d430c-kube-api-access-w552w\") on node \"crc\" DevicePath \"\"" Feb 03 07:21:49 crc kubenswrapper[4708]: I0203 07:21:49.443663 4708 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/20d02b2d-b83b-4dcf-ac9d-bffece1d430c-util\") on node \"crc\" DevicePath \"\"" Feb 03 07:21:49 crc kubenswrapper[4708]: I0203 07:21:49.443773 4708 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/20d02b2d-b83b-4dcf-ac9d-bffece1d430c-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:21:49 crc kubenswrapper[4708]: I0203 07:21:49.984982 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n" event={"ID":"20d02b2d-b83b-4dcf-ac9d-bffece1d430c","Type":"ContainerDied","Data":"2d88787208e5cce71f386d6547e15e2afaca6dd16a3b55f371db7439a600f97d"} Feb 03 07:21:49 crc kubenswrapper[4708]: I0203 07:21:49.985023 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2d88787208e5cce71f386d6547e15e2afaca6dd16a3b55f371db7439a600f97d" Feb 03 07:21:49 crc kubenswrapper[4708]: I0203 07:21:49.985077 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n" Feb 03 07:21:55 crc kubenswrapper[4708]: I0203 07:21:55.420517 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-jx7x5"] Feb 03 07:21:55 crc kubenswrapper[4708]: E0203 07:21:55.421260 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20d02b2d-b83b-4dcf-ac9d-bffece1d430c" containerName="pull" Feb 03 07:21:55 crc kubenswrapper[4708]: I0203 07:21:55.421274 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="20d02b2d-b83b-4dcf-ac9d-bffece1d430c" containerName="pull" Feb 03 07:21:55 crc kubenswrapper[4708]: E0203 07:21:55.421298 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20d02b2d-b83b-4dcf-ac9d-bffece1d430c" containerName="extract" Feb 03 07:21:55 crc kubenswrapper[4708]: I0203 07:21:55.421306 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="20d02b2d-b83b-4dcf-ac9d-bffece1d430c" containerName="extract" Feb 03 07:21:55 crc kubenswrapper[4708]: E0203 07:21:55.421320 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20d02b2d-b83b-4dcf-ac9d-bffece1d430c" containerName="util" Feb 03 07:21:55 crc kubenswrapper[4708]: I0203 07:21:55.421329 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="20d02b2d-b83b-4dcf-ac9d-bffece1d430c" containerName="util" Feb 03 07:21:55 crc kubenswrapper[4708]: I0203 07:21:55.421438 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="20d02b2d-b83b-4dcf-ac9d-bffece1d430c" containerName="extract" Feb 03 07:21:55 crc kubenswrapper[4708]: I0203 07:21:55.421909 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-jx7x5" Feb 03 07:21:55 crc kubenswrapper[4708]: I0203 07:21:55.429151 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-vnsdx" Feb 03 07:21:55 crc kubenswrapper[4708]: I0203 07:21:55.429213 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Feb 03 07:21:55 crc kubenswrapper[4708]: I0203 07:21:55.429151 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Feb 03 07:21:55 crc kubenswrapper[4708]: I0203 07:21:55.434951 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-jx7x5"] Feb 03 07:21:55 crc kubenswrapper[4708]: I0203 07:21:55.523249 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbsk5\" (UniqueName: \"kubernetes.io/projected/3396f4c1-fb82-428b-bde6-0f30b8bf6c59-kube-api-access-tbsk5\") pod \"nmstate-operator-646758c888-jx7x5\" (UID: \"3396f4c1-fb82-428b-bde6-0f30b8bf6c59\") " pod="openshift-nmstate/nmstate-operator-646758c888-jx7x5" Feb 03 07:21:55 crc kubenswrapper[4708]: I0203 07:21:55.624205 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbsk5\" (UniqueName: \"kubernetes.io/projected/3396f4c1-fb82-428b-bde6-0f30b8bf6c59-kube-api-access-tbsk5\") pod \"nmstate-operator-646758c888-jx7x5\" (UID: \"3396f4c1-fb82-428b-bde6-0f30b8bf6c59\") " pod="openshift-nmstate/nmstate-operator-646758c888-jx7x5" Feb 03 07:21:55 crc kubenswrapper[4708]: I0203 07:21:55.646662 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbsk5\" (UniqueName: \"kubernetes.io/projected/3396f4c1-fb82-428b-bde6-0f30b8bf6c59-kube-api-access-tbsk5\") pod \"nmstate-operator-646758c888-jx7x5\" (UID: \"3396f4c1-fb82-428b-bde6-0f30b8bf6c59\") " pod="openshift-nmstate/nmstate-operator-646758c888-jx7x5" Feb 03 07:21:55 crc kubenswrapper[4708]: I0203 07:21:55.739162 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-jx7x5" Feb 03 07:21:55 crc kubenswrapper[4708]: I0203 07:21:55.970950 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-jx7x5"] Feb 03 07:21:55 crc kubenswrapper[4708]: W0203 07:21:55.975025 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3396f4c1_fb82_428b_bde6_0f30b8bf6c59.slice/crio-e14ca689b820b7346cd23f95e4b9cf1318c1d30dd857e749be0a3ce2032e476f WatchSource:0}: Error finding container e14ca689b820b7346cd23f95e4b9cf1318c1d30dd857e749be0a3ce2032e476f: Status 404 returned error can't find the container with id e14ca689b820b7346cd23f95e4b9cf1318c1d30dd857e749be0a3ce2032e476f Feb 03 07:21:56 crc kubenswrapper[4708]: I0203 07:21:56.024420 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-jx7x5" event={"ID":"3396f4c1-fb82-428b-bde6-0f30b8bf6c59","Type":"ContainerStarted","Data":"e14ca689b820b7346cd23f95e4b9cf1318c1d30dd857e749be0a3ce2032e476f"} Feb 03 07:21:59 crc kubenswrapper[4708]: I0203 07:21:59.046313 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-jx7x5" event={"ID":"3396f4c1-fb82-428b-bde6-0f30b8bf6c59","Type":"ContainerStarted","Data":"a660f1efeeed17ac6e10aa6c839ecf6497dc2c22dfeed35217825e76f70c6025"} Feb 03 07:21:59 crc kubenswrapper[4708]: I0203 07:21:59.084448 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-646758c888-jx7x5" podStartSLOduration=2.07558769 podStartE2EDuration="4.084412243s" podCreationTimestamp="2026-02-03 07:21:55 +0000 UTC" firstStartedPulling="2026-02-03 07:21:55.976651114 +0000 UTC m=+694.958597931" lastFinishedPulling="2026-02-03 07:21:57.985475677 +0000 UTC m=+696.967422484" observedRunningTime="2026-02-03 07:21:59.065705678 +0000 UTC m=+698.047652485" watchObservedRunningTime="2026-02-03 07:21:59.084412243 +0000 UTC m=+698.066359100" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.103262 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-vrkbt"] Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.104917 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-vrkbt" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.106659 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-xrptg" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.114977 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-b725x"] Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.116179 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-b725x" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.119091 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-vrkbt"] Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.119642 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.140806 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-b725x"] Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.148953 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p57wq\" (UniqueName: \"kubernetes.io/projected/f3e72a2c-73aa-410e-8386-1a2e6b510d4f-kube-api-access-p57wq\") pod \"nmstate-metrics-54757c584b-vrkbt\" (UID: \"f3e72a2c-73aa-410e-8386-1a2e6b510d4f\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-vrkbt" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.149015 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/4bcad1f4-a07d-4a93-8b5b-b6df72d2e34e-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-b725x\" (UID: \"4bcad1f4-a07d-4a93-8b5b-b6df72d2e34e\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-b725x" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.149169 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lnnsv\" (UniqueName: \"kubernetes.io/projected/4bcad1f4-a07d-4a93-8b5b-b6df72d2e34e-kube-api-access-lnnsv\") pod \"nmstate-webhook-8474b5b9d8-b725x\" (UID: \"4bcad1f4-a07d-4a93-8b5b-b6df72d2e34e\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-b725x" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.149453 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-ls24k"] Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.150275 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-ls24k" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.245968 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-nwm6n"] Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.246630 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-nwm6n" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.249277 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.249347 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.249894 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p57wq\" (UniqueName: \"kubernetes.io/projected/f3e72a2c-73aa-410e-8386-1a2e6b510d4f-kube-api-access-p57wq\") pod \"nmstate-metrics-54757c584b-vrkbt\" (UID: \"f3e72a2c-73aa-410e-8386-1a2e6b510d4f\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-vrkbt" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.249955 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/4bcad1f4-a07d-4a93-8b5b-b6df72d2e34e-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-b725x\" (UID: \"4bcad1f4-a07d-4a93-8b5b-b6df72d2e34e\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-b725x" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.249984 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24w2x\" (UniqueName: \"kubernetes.io/projected/5c6dbe91-1ee1-4629-bbee-e661af990956-kube-api-access-24w2x\") pod \"nmstate-console-plugin-7754f76f8b-nwm6n\" (UID: \"5c6dbe91-1ee1-4629-bbee-e661af990956\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-nwm6n" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.250049 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqmv7\" (UniqueName: \"kubernetes.io/projected/09450ee3-4732-4c81-8bf1-cca9c8d8fdc6-kube-api-access-pqmv7\") pod \"nmstate-handler-ls24k\" (UID: \"09450ee3-4732-4c81-8bf1-cca9c8d8fdc6\") " pod="openshift-nmstate/nmstate-handler-ls24k" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.250077 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/09450ee3-4732-4c81-8bf1-cca9c8d8fdc6-dbus-socket\") pod \"nmstate-handler-ls24k\" (UID: \"09450ee3-4732-4c81-8bf1-cca9c8d8fdc6\") " pod="openshift-nmstate/nmstate-handler-ls24k" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.250094 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/09450ee3-4732-4c81-8bf1-cca9c8d8fdc6-ovs-socket\") pod \"nmstate-handler-ls24k\" (UID: \"09450ee3-4732-4c81-8bf1-cca9c8d8fdc6\") " pod="openshift-nmstate/nmstate-handler-ls24k" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.250114 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lnnsv\" (UniqueName: \"kubernetes.io/projected/4bcad1f4-a07d-4a93-8b5b-b6df72d2e34e-kube-api-access-lnnsv\") pod \"nmstate-webhook-8474b5b9d8-b725x\" (UID: \"4bcad1f4-a07d-4a93-8b5b-b6df72d2e34e\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-b725x" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.250207 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/5c6dbe91-1ee1-4629-bbee-e661af990956-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-nwm6n\" (UID: \"5c6dbe91-1ee1-4629-bbee-e661af990956\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-nwm6n" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.250286 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/09450ee3-4732-4c81-8bf1-cca9c8d8fdc6-nmstate-lock\") pod \"nmstate-handler-ls24k\" (UID: \"09450ee3-4732-4c81-8bf1-cca9c8d8fdc6\") " pod="openshift-nmstate/nmstate-handler-ls24k" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.250330 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5c6dbe91-1ee1-4629-bbee-e661af990956-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-nwm6n\" (UID: \"5c6dbe91-1ee1-4629-bbee-e661af990956\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-nwm6n" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.251808 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-5dhfb" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.257145 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/4bcad1f4-a07d-4a93-8b5b-b6df72d2e34e-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-b725x\" (UID: \"4bcad1f4-a07d-4a93-8b5b-b6df72d2e34e\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-b725x" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.265483 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-nwm6n"] Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.267246 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lnnsv\" (UniqueName: \"kubernetes.io/projected/4bcad1f4-a07d-4a93-8b5b-b6df72d2e34e-kube-api-access-lnnsv\") pod \"nmstate-webhook-8474b5b9d8-b725x\" (UID: \"4bcad1f4-a07d-4a93-8b5b-b6df72d2e34e\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-b725x" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.276637 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p57wq\" (UniqueName: \"kubernetes.io/projected/f3e72a2c-73aa-410e-8386-1a2e6b510d4f-kube-api-access-p57wq\") pod \"nmstate-metrics-54757c584b-vrkbt\" (UID: \"f3e72a2c-73aa-410e-8386-1a2e6b510d4f\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-vrkbt" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.353070 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pqmv7\" (UniqueName: \"kubernetes.io/projected/09450ee3-4732-4c81-8bf1-cca9c8d8fdc6-kube-api-access-pqmv7\") pod \"nmstate-handler-ls24k\" (UID: \"09450ee3-4732-4c81-8bf1-cca9c8d8fdc6\") " pod="openshift-nmstate/nmstate-handler-ls24k" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.353148 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/09450ee3-4732-4c81-8bf1-cca9c8d8fdc6-ovs-socket\") pod \"nmstate-handler-ls24k\" (UID: \"09450ee3-4732-4c81-8bf1-cca9c8d8fdc6\") " pod="openshift-nmstate/nmstate-handler-ls24k" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.353175 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/09450ee3-4732-4c81-8bf1-cca9c8d8fdc6-dbus-socket\") pod \"nmstate-handler-ls24k\" (UID: \"09450ee3-4732-4c81-8bf1-cca9c8d8fdc6\") " pod="openshift-nmstate/nmstate-handler-ls24k" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.353205 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/5c6dbe91-1ee1-4629-bbee-e661af990956-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-nwm6n\" (UID: \"5c6dbe91-1ee1-4629-bbee-e661af990956\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-nwm6n" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.353232 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/09450ee3-4732-4c81-8bf1-cca9c8d8fdc6-nmstate-lock\") pod \"nmstate-handler-ls24k\" (UID: \"09450ee3-4732-4c81-8bf1-cca9c8d8fdc6\") " pod="openshift-nmstate/nmstate-handler-ls24k" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.353260 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5c6dbe91-1ee1-4629-bbee-e661af990956-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-nwm6n\" (UID: \"5c6dbe91-1ee1-4629-bbee-e661af990956\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-nwm6n" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.353321 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24w2x\" (UniqueName: \"kubernetes.io/projected/5c6dbe91-1ee1-4629-bbee-e661af990956-kube-api-access-24w2x\") pod \"nmstate-console-plugin-7754f76f8b-nwm6n\" (UID: \"5c6dbe91-1ee1-4629-bbee-e661af990956\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-nwm6n" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.353865 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/09450ee3-4732-4c81-8bf1-cca9c8d8fdc6-ovs-socket\") pod \"nmstate-handler-ls24k\" (UID: \"09450ee3-4732-4c81-8bf1-cca9c8d8fdc6\") " pod="openshift-nmstate/nmstate-handler-ls24k" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.353996 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/09450ee3-4732-4c81-8bf1-cca9c8d8fdc6-nmstate-lock\") pod \"nmstate-handler-ls24k\" (UID: \"09450ee3-4732-4c81-8bf1-cca9c8d8fdc6\") " pod="openshift-nmstate/nmstate-handler-ls24k" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.354144 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/09450ee3-4732-4c81-8bf1-cca9c8d8fdc6-dbus-socket\") pod \"nmstate-handler-ls24k\" (UID: \"09450ee3-4732-4c81-8bf1-cca9c8d8fdc6\") " pod="openshift-nmstate/nmstate-handler-ls24k" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.354784 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5c6dbe91-1ee1-4629-bbee-e661af990956-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-nwm6n\" (UID: \"5c6dbe91-1ee1-4629-bbee-e661af990956\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-nwm6n" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.363544 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/5c6dbe91-1ee1-4629-bbee-e661af990956-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-nwm6n\" (UID: \"5c6dbe91-1ee1-4629-bbee-e661af990956\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-nwm6n" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.382964 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pqmv7\" (UniqueName: \"kubernetes.io/projected/09450ee3-4732-4c81-8bf1-cca9c8d8fdc6-kube-api-access-pqmv7\") pod \"nmstate-handler-ls24k\" (UID: \"09450ee3-4732-4c81-8bf1-cca9c8d8fdc6\") " pod="openshift-nmstate/nmstate-handler-ls24k" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.386484 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24w2x\" (UniqueName: \"kubernetes.io/projected/5c6dbe91-1ee1-4629-bbee-e661af990956-kube-api-access-24w2x\") pod \"nmstate-console-plugin-7754f76f8b-nwm6n\" (UID: \"5c6dbe91-1ee1-4629-bbee-e661af990956\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-nwm6n" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.425043 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-vrkbt" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.433361 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-644c8bdbd-8p4s7"] Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.434359 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-644c8bdbd-8p4s7" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.439868 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-b725x" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.453930 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-644c8bdbd-8p4s7"] Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.454421 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1568fb64-af24-4213-bbbc-d6a4a143f7ca-console-oauth-config\") pod \"console-644c8bdbd-8p4s7\" (UID: \"1568fb64-af24-4213-bbbc-d6a4a143f7ca\") " pod="openshift-console/console-644c8bdbd-8p4s7" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.454470 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1568fb64-af24-4213-bbbc-d6a4a143f7ca-console-config\") pod \"console-644c8bdbd-8p4s7\" (UID: \"1568fb64-af24-4213-bbbc-d6a4a143f7ca\") " pod="openshift-console/console-644c8bdbd-8p4s7" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.454503 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1568fb64-af24-4213-bbbc-d6a4a143f7ca-oauth-serving-cert\") pod \"console-644c8bdbd-8p4s7\" (UID: \"1568fb64-af24-4213-bbbc-d6a4a143f7ca\") " pod="openshift-console/console-644c8bdbd-8p4s7" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.454527 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1568fb64-af24-4213-bbbc-d6a4a143f7ca-service-ca\") pod \"console-644c8bdbd-8p4s7\" (UID: 
\"1568fb64-af24-4213-bbbc-d6a4a143f7ca\") " pod="openshift-console/console-644c8bdbd-8p4s7" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.454568 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8k45f\" (UniqueName: \"kubernetes.io/projected/1568fb64-af24-4213-bbbc-d6a4a143f7ca-kube-api-access-8k45f\") pod \"console-644c8bdbd-8p4s7\" (UID: \"1568fb64-af24-4213-bbbc-d6a4a143f7ca\") " pod="openshift-console/console-644c8bdbd-8p4s7" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.454599 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1568fb64-af24-4213-bbbc-d6a4a143f7ca-console-serving-cert\") pod \"console-644c8bdbd-8p4s7\" (UID: \"1568fb64-af24-4213-bbbc-d6a4a143f7ca\") " pod="openshift-console/console-644c8bdbd-8p4s7" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.454628 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1568fb64-af24-4213-bbbc-d6a4a143f7ca-trusted-ca-bundle\") pod \"console-644c8bdbd-8p4s7\" (UID: \"1568fb64-af24-4213-bbbc-d6a4a143f7ca\") " pod="openshift-console/console-644c8bdbd-8p4s7" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.477292 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-ls24k" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.556269 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1568fb64-af24-4213-bbbc-d6a4a143f7ca-oauth-serving-cert\") pod \"console-644c8bdbd-8p4s7\" (UID: \"1568fb64-af24-4213-bbbc-d6a4a143f7ca\") " pod="openshift-console/console-644c8bdbd-8p4s7" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.556303 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1568fb64-af24-4213-bbbc-d6a4a143f7ca-service-ca\") pod \"console-644c8bdbd-8p4s7\" (UID: \"1568fb64-af24-4213-bbbc-d6a4a143f7ca\") " pod="openshift-console/console-644c8bdbd-8p4s7" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.556331 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8k45f\" (UniqueName: \"kubernetes.io/projected/1568fb64-af24-4213-bbbc-d6a4a143f7ca-kube-api-access-8k45f\") pod \"console-644c8bdbd-8p4s7\" (UID: \"1568fb64-af24-4213-bbbc-d6a4a143f7ca\") " pod="openshift-console/console-644c8bdbd-8p4s7" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.556359 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1568fb64-af24-4213-bbbc-d6a4a143f7ca-console-serving-cert\") pod \"console-644c8bdbd-8p4s7\" (UID: \"1568fb64-af24-4213-bbbc-d6a4a143f7ca\") " pod="openshift-console/console-644c8bdbd-8p4s7" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.556382 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1568fb64-af24-4213-bbbc-d6a4a143f7ca-trusted-ca-bundle\") pod \"console-644c8bdbd-8p4s7\" (UID: \"1568fb64-af24-4213-bbbc-d6a4a143f7ca\") " pod="openshift-console/console-644c8bdbd-8p4s7" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.556438 
4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1568fb64-af24-4213-bbbc-d6a4a143f7ca-console-oauth-config\") pod \"console-644c8bdbd-8p4s7\" (UID: \"1568fb64-af24-4213-bbbc-d6a4a143f7ca\") " pod="openshift-console/console-644c8bdbd-8p4s7" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.556453 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1568fb64-af24-4213-bbbc-d6a4a143f7ca-console-config\") pod \"console-644c8bdbd-8p4s7\" (UID: \"1568fb64-af24-4213-bbbc-d6a4a143f7ca\") " pod="openshift-console/console-644c8bdbd-8p4s7" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.557537 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1568fb64-af24-4213-bbbc-d6a4a143f7ca-console-config\") pod \"console-644c8bdbd-8p4s7\" (UID: \"1568fb64-af24-4213-bbbc-d6a4a143f7ca\") " pod="openshift-console/console-644c8bdbd-8p4s7" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.558068 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1568fb64-af24-4213-bbbc-d6a4a143f7ca-oauth-serving-cert\") pod \"console-644c8bdbd-8p4s7\" (UID: \"1568fb64-af24-4213-bbbc-d6a4a143f7ca\") " pod="openshift-console/console-644c8bdbd-8p4s7" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.558596 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1568fb64-af24-4213-bbbc-d6a4a143f7ca-service-ca\") pod \"console-644c8bdbd-8p4s7\" (UID: \"1568fb64-af24-4213-bbbc-d6a4a143f7ca\") " pod="openshift-console/console-644c8bdbd-8p4s7" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.560364 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1568fb64-af24-4213-bbbc-d6a4a143f7ca-trusted-ca-bundle\") pod \"console-644c8bdbd-8p4s7\" (UID: \"1568fb64-af24-4213-bbbc-d6a4a143f7ca\") " pod="openshift-console/console-644c8bdbd-8p4s7" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.564519 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1568fb64-af24-4213-bbbc-d6a4a143f7ca-console-serving-cert\") pod \"console-644c8bdbd-8p4s7\" (UID: \"1568fb64-af24-4213-bbbc-d6a4a143f7ca\") " pod="openshift-console/console-644c8bdbd-8p4s7" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.564885 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1568fb64-af24-4213-bbbc-d6a4a143f7ca-console-oauth-config\") pod \"console-644c8bdbd-8p4s7\" (UID: \"1568fb64-af24-4213-bbbc-d6a4a143f7ca\") " pod="openshift-console/console-644c8bdbd-8p4s7" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.598100 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8k45f\" (UniqueName: \"kubernetes.io/projected/1568fb64-af24-4213-bbbc-d6a4a143f7ca-kube-api-access-8k45f\") pod \"console-644c8bdbd-8p4s7\" (UID: \"1568fb64-af24-4213-bbbc-d6a4a143f7ca\") " pod="openshift-console/console-644c8bdbd-8p4s7" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.607147 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-nwm6n" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.731306 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-b725x"] Feb 03 07:22:04 crc kubenswrapper[4708]: W0203 07:22:04.735137 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4bcad1f4_a07d_4a93_8b5b_b6df72d2e34e.slice/crio-b8ec0097ec797f2a50c9460a3bd48c702c74104615b6a5b51b037d41f8386988 WatchSource:0}: Error finding container b8ec0097ec797f2a50c9460a3bd48c702c74104615b6a5b51b037d41f8386988: Status 404 returned error can't find the container with id b8ec0097ec797f2a50c9460a3bd48c702c74104615b6a5b51b037d41f8386988 Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.767586 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-644c8bdbd-8p4s7" Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.930028 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-644c8bdbd-8p4s7"] Feb 03 07:22:04 crc kubenswrapper[4708]: W0203 07:22:04.935190 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1568fb64_af24_4213_bbbc_d6a4a143f7ca.slice/crio-b1da86b318fbdc5855df408790b57bc0b93fe083f183497addd067471b9f6989 WatchSource:0}: Error finding container b1da86b318fbdc5855df408790b57bc0b93fe083f183497addd067471b9f6989: Status 404 returned error can't find the container with id b1da86b318fbdc5855df408790b57bc0b93fe083f183497addd067471b9f6989 Feb 03 07:22:04 crc kubenswrapper[4708]: I0203 07:22:04.994125 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-vrkbt"] Feb 03 07:22:05 crc kubenswrapper[4708]: W0203 07:22:05.071829 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5c6dbe91_1ee1_4629_bbee_e661af990956.slice/crio-c39eaaea7670b99273454881800706a23ac7b04b59b85f01fd46da72d22ed615 WatchSource:0}: Error finding container c39eaaea7670b99273454881800706a23ac7b04b59b85f01fd46da72d22ed615: Status 404 returned error can't find the container with id c39eaaea7670b99273454881800706a23ac7b04b59b85f01fd46da72d22ed615 Feb 03 07:22:05 crc kubenswrapper[4708]: I0203 07:22:05.071940 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-nwm6n"] Feb 03 07:22:05 crc kubenswrapper[4708]: I0203 07:22:05.082784 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-ls24k" event={"ID":"09450ee3-4732-4c81-8bf1-cca9c8d8fdc6","Type":"ContainerStarted","Data":"6c5774a97e03c42a314dcf3f3a24fb57bc783ee8205a90bf10a529d3d8b0e240"} Feb 03 07:22:05 crc kubenswrapper[4708]: I0203 07:22:05.083958 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-vrkbt" event={"ID":"f3e72a2c-73aa-410e-8386-1a2e6b510d4f","Type":"ContainerStarted","Data":"5e6a4a5eaba4f58e484db1cd0870af0acb0a4dfb1fac212460cfa2eb62ffe739"} Feb 03 07:22:05 crc kubenswrapper[4708]: I0203 07:22:05.084866 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-b725x" 
event={"ID":"4bcad1f4-a07d-4a93-8b5b-b6df72d2e34e","Type":"ContainerStarted","Data":"b8ec0097ec797f2a50c9460a3bd48c702c74104615b6a5b51b037d41f8386988"} Feb 03 07:22:05 crc kubenswrapper[4708]: I0203 07:22:05.085991 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-644c8bdbd-8p4s7" event={"ID":"1568fb64-af24-4213-bbbc-d6a4a143f7ca","Type":"ContainerStarted","Data":"e13b848ffa7ebd895ed8a81bf37dd8f6a22dfcb2c4102814c08c6669bc4afdf7"} Feb 03 07:22:05 crc kubenswrapper[4708]: I0203 07:22:05.086137 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-644c8bdbd-8p4s7" event={"ID":"1568fb64-af24-4213-bbbc-d6a4a143f7ca","Type":"ContainerStarted","Data":"b1da86b318fbdc5855df408790b57bc0b93fe083f183497addd067471b9f6989"} Feb 03 07:22:05 crc kubenswrapper[4708]: I0203 07:22:05.086925 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-nwm6n" event={"ID":"5c6dbe91-1ee1-4629-bbee-e661af990956","Type":"ContainerStarted","Data":"c39eaaea7670b99273454881800706a23ac7b04b59b85f01fd46da72d22ed615"} Feb 03 07:22:08 crc kubenswrapper[4708]: I0203 07:22:08.107833 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-ls24k" event={"ID":"09450ee3-4732-4c81-8bf1-cca9c8d8fdc6","Type":"ContainerStarted","Data":"965017486d811e25d879ed5c3068ce5872e9fb10ebc228cc7d91eb8cde8f40da"} Feb 03 07:22:08 crc kubenswrapper[4708]: I0203 07:22:08.108551 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-ls24k" Feb 03 07:22:08 crc kubenswrapper[4708]: I0203 07:22:08.110352 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-vrkbt" event={"ID":"f3e72a2c-73aa-410e-8386-1a2e6b510d4f","Type":"ContainerStarted","Data":"9ee28be6d4901f26f073679abd6e8f00939929334595ab607f36daabeaf00555"} Feb 03 07:22:08 crc kubenswrapper[4708]: I0203 07:22:08.112558 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-b725x" event={"ID":"4bcad1f4-a07d-4a93-8b5b-b6df72d2e34e","Type":"ContainerStarted","Data":"b53e9a8fc88a1329bb02289246d8c149652d25099526a0d09ac702e329dde0c2"} Feb 03 07:22:08 crc kubenswrapper[4708]: I0203 07:22:08.112706 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-b725x" Feb 03 07:22:08 crc kubenswrapper[4708]: I0203 07:22:08.120952 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-644c8bdbd-8p4s7" podStartSLOduration=4.120941617 podStartE2EDuration="4.120941617s" podCreationTimestamp="2026-02-03 07:22:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:22:05.107118203 +0000 UTC m=+704.089065030" watchObservedRunningTime="2026-02-03 07:22:08.120941617 +0000 UTC m=+707.102888414" Feb 03 07:22:08 crc kubenswrapper[4708]: I0203 07:22:08.124199 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-ls24k" podStartSLOduration=1.647904783 podStartE2EDuration="4.124191946s" podCreationTimestamp="2026-02-03 07:22:04 +0000 UTC" firstStartedPulling="2026-02-03 07:22:04.592501977 +0000 UTC m=+703.574448784" lastFinishedPulling="2026-02-03 07:22:07.06878909 +0000 UTC m=+706.050735947" observedRunningTime="2026-02-03 07:22:08.123037089 +0000 UTC 
m=+707.104983896" watchObservedRunningTime="2026-02-03 07:22:08.124191946 +0000 UTC m=+707.106138753" Feb 03 07:22:08 crc kubenswrapper[4708]: I0203 07:22:08.138279 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-b725x" podStartSLOduration=1.805315063 podStartE2EDuration="4.138264339s" podCreationTimestamp="2026-02-03 07:22:04 +0000 UTC" firstStartedPulling="2026-02-03 07:22:04.747350544 +0000 UTC m=+703.729297341" lastFinishedPulling="2026-02-03 07:22:07.0802998 +0000 UTC m=+706.062246617" observedRunningTime="2026-02-03 07:22:08.135367008 +0000 UTC m=+707.117313825" watchObservedRunningTime="2026-02-03 07:22:08.138264339 +0000 UTC m=+707.120211146" Feb 03 07:22:09 crc kubenswrapper[4708]: I0203 07:22:09.121001 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-nwm6n" event={"ID":"5c6dbe91-1ee1-4629-bbee-e661af990956","Type":"ContainerStarted","Data":"fbebceaf896bec8064c697371cce240354958d153fbf9a8cb45849a9245279eb"} Feb 03 07:22:09 crc kubenswrapper[4708]: I0203 07:22:09.137640 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-nwm6n" podStartSLOduration=1.891282412 podStartE2EDuration="5.137618732s" podCreationTimestamp="2026-02-03 07:22:04 +0000 UTC" firstStartedPulling="2026-02-03 07:22:05.074403197 +0000 UTC m=+704.056350004" lastFinishedPulling="2026-02-03 07:22:08.320739517 +0000 UTC m=+707.302686324" observedRunningTime="2026-02-03 07:22:09.136385272 +0000 UTC m=+708.118332099" watchObservedRunningTime="2026-02-03 07:22:09.137618732 +0000 UTC m=+708.119565539" Feb 03 07:22:10 crc kubenswrapper[4708]: I0203 07:22:10.129730 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-vrkbt" event={"ID":"f3e72a2c-73aa-410e-8386-1a2e6b510d4f","Type":"ContainerStarted","Data":"773488a93020d4ffe1749e98c31bfbc6cffbc810d490da6ec4d66003e9e0e836"} Feb 03 07:22:10 crc kubenswrapper[4708]: I0203 07:22:10.155495 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-54757c584b-vrkbt" podStartSLOduration=1.73906142 podStartE2EDuration="6.155475306s" podCreationTimestamp="2026-02-03 07:22:04 +0000 UTC" firstStartedPulling="2026-02-03 07:22:05.000196472 +0000 UTC m=+703.982143289" lastFinishedPulling="2026-02-03 07:22:09.416610368 +0000 UTC m=+708.398557175" observedRunningTime="2026-02-03 07:22:10.153185241 +0000 UTC m=+709.135132118" watchObservedRunningTime="2026-02-03 07:22:10.155475306 +0000 UTC m=+709.137422133" Feb 03 07:22:14 crc kubenswrapper[4708]: I0203 07:22:14.507602 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-ls24k" Feb 03 07:22:14 crc kubenswrapper[4708]: I0203 07:22:14.768349 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-644c8bdbd-8p4s7" Feb 03 07:22:14 crc kubenswrapper[4708]: I0203 07:22:14.768899 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-644c8bdbd-8p4s7" Feb 03 07:22:14 crc kubenswrapper[4708]: I0203 07:22:14.775438 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-644c8bdbd-8p4s7" Feb 03 07:22:15 crc kubenswrapper[4708]: I0203 07:22:15.170036 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-console/console-644c8bdbd-8p4s7" Feb 03 07:22:15 crc kubenswrapper[4708]: I0203 07:22:15.236286 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-zd8kn"] Feb 03 07:22:24 crc kubenswrapper[4708]: I0203 07:22:24.447005 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-b725x" Feb 03 07:22:38 crc kubenswrapper[4708]: I0203 07:22:38.502238 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf"] Feb 03 07:22:38 crc kubenswrapper[4708]: I0203 07:22:38.504397 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf" Feb 03 07:22:38 crc kubenswrapper[4708]: I0203 07:22:38.506395 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Feb 03 07:22:38 crc kubenswrapper[4708]: I0203 07:22:38.513581 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf"] Feb 03 07:22:38 crc kubenswrapper[4708]: I0203 07:22:38.704496 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/402c1015-f80b-44cf-aab2-afd529531cfd-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf\" (UID: \"402c1015-f80b-44cf-aab2-afd529531cfd\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf" Feb 03 07:22:38 crc kubenswrapper[4708]: I0203 07:22:38.704585 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/402c1015-f80b-44cf-aab2-afd529531cfd-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf\" (UID: \"402c1015-f80b-44cf-aab2-afd529531cfd\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf" Feb 03 07:22:38 crc kubenswrapper[4708]: I0203 07:22:38.704643 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbcgc\" (UniqueName: \"kubernetes.io/projected/402c1015-f80b-44cf-aab2-afd529531cfd-kube-api-access-tbcgc\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf\" (UID: \"402c1015-f80b-44cf-aab2-afd529531cfd\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf" Feb 03 07:22:38 crc kubenswrapper[4708]: I0203 07:22:38.805901 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/402c1015-f80b-44cf-aab2-afd529531cfd-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf\" (UID: \"402c1015-f80b-44cf-aab2-afd529531cfd\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf" Feb 03 07:22:38 crc kubenswrapper[4708]: I0203 07:22:38.805995 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/402c1015-f80b-44cf-aab2-afd529531cfd-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf\" (UID: \"402c1015-f80b-44cf-aab2-afd529531cfd\") " 
pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf" Feb 03 07:22:38 crc kubenswrapper[4708]: I0203 07:22:38.806057 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbcgc\" (UniqueName: \"kubernetes.io/projected/402c1015-f80b-44cf-aab2-afd529531cfd-kube-api-access-tbcgc\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf\" (UID: \"402c1015-f80b-44cf-aab2-afd529531cfd\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf" Feb 03 07:22:38 crc kubenswrapper[4708]: I0203 07:22:38.806536 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/402c1015-f80b-44cf-aab2-afd529531cfd-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf\" (UID: \"402c1015-f80b-44cf-aab2-afd529531cfd\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf" Feb 03 07:22:38 crc kubenswrapper[4708]: I0203 07:22:38.806748 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/402c1015-f80b-44cf-aab2-afd529531cfd-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf\" (UID: \"402c1015-f80b-44cf-aab2-afd529531cfd\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf" Feb 03 07:22:38 crc kubenswrapper[4708]: I0203 07:22:38.832820 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbcgc\" (UniqueName: \"kubernetes.io/projected/402c1015-f80b-44cf-aab2-afd529531cfd-kube-api-access-tbcgc\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf\" (UID: \"402c1015-f80b-44cf-aab2-afd529531cfd\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf" Feb 03 07:22:38 crc kubenswrapper[4708]: I0203 07:22:38.847159 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf" Feb 03 07:22:39 crc kubenswrapper[4708]: I0203 07:22:39.280835 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf"] Feb 03 07:22:39 crc kubenswrapper[4708]: I0203 07:22:39.338709 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf" event={"ID":"402c1015-f80b-44cf-aab2-afd529531cfd","Type":"ContainerStarted","Data":"e465ecbbdbe50f1a0ffa82e8dca73b344c948e40d08881d31b7f2ebf86d7a157"} Feb 03 07:22:40 crc kubenswrapper[4708]: I0203 07:22:40.283233 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-zd8kn" podUID="46ee42c1-592d-47c3-85ba-ead60edf7aca" containerName="console" containerID="cri-o://cd92d92f054982bdf31a98c86bd7fe25b74e1231a5ec30c339dd5d26e7132711" gracePeriod=15 Feb 03 07:22:40 crc kubenswrapper[4708]: I0203 07:22:40.354393 4708 generic.go:334] "Generic (PLEG): container finished" podID="402c1015-f80b-44cf-aab2-afd529531cfd" containerID="8810b8eb448f27a80a160f8d537ea1c25735329cdfaf090a03d96f15987a9179" exitCode=0 Feb 03 07:22:40 crc kubenswrapper[4708]: I0203 07:22:40.354465 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf" event={"ID":"402c1015-f80b-44cf-aab2-afd529531cfd","Type":"ContainerDied","Data":"8810b8eb448f27a80a160f8d537ea1c25735329cdfaf090a03d96f15987a9179"} Feb 03 07:22:40 crc kubenswrapper[4708]: I0203 07:22:40.718383 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-zd8kn_46ee42c1-592d-47c3-85ba-ead60edf7aca/console/0.log" Feb 03 07:22:40 crc kubenswrapper[4708]: I0203 07:22:40.718462 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-zd8kn" Feb 03 07:22:40 crc kubenswrapper[4708]: I0203 07:22:40.734978 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/46ee42c1-592d-47c3-85ba-ead60edf7aca-service-ca\") pod \"46ee42c1-592d-47c3-85ba-ead60edf7aca\" (UID: \"46ee42c1-592d-47c3-85ba-ead60edf7aca\") " Feb 03 07:22:40 crc kubenswrapper[4708]: I0203 07:22:40.735045 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/46ee42c1-592d-47c3-85ba-ead60edf7aca-trusted-ca-bundle\") pod \"46ee42c1-592d-47c3-85ba-ead60edf7aca\" (UID: \"46ee42c1-592d-47c3-85ba-ead60edf7aca\") " Feb 03 07:22:40 crc kubenswrapper[4708]: I0203 07:22:40.735080 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8246l\" (UniqueName: \"kubernetes.io/projected/46ee42c1-592d-47c3-85ba-ead60edf7aca-kube-api-access-8246l\") pod \"46ee42c1-592d-47c3-85ba-ead60edf7aca\" (UID: \"46ee42c1-592d-47c3-85ba-ead60edf7aca\") " Feb 03 07:22:40 crc kubenswrapper[4708]: I0203 07:22:40.735160 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/46ee42c1-592d-47c3-85ba-ead60edf7aca-console-serving-cert\") pod \"46ee42c1-592d-47c3-85ba-ead60edf7aca\" (UID: \"46ee42c1-592d-47c3-85ba-ead60edf7aca\") " Feb 03 07:22:40 crc kubenswrapper[4708]: I0203 07:22:40.735254 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/46ee42c1-592d-47c3-85ba-ead60edf7aca-console-oauth-config\") pod \"46ee42c1-592d-47c3-85ba-ead60edf7aca\" (UID: \"46ee42c1-592d-47c3-85ba-ead60edf7aca\") " Feb 03 07:22:40 crc kubenswrapper[4708]: I0203 07:22:40.735282 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/46ee42c1-592d-47c3-85ba-ead60edf7aca-oauth-serving-cert\") pod \"46ee42c1-592d-47c3-85ba-ead60edf7aca\" (UID: \"46ee42c1-592d-47c3-85ba-ead60edf7aca\") " Feb 03 07:22:40 crc kubenswrapper[4708]: I0203 07:22:40.735316 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/46ee42c1-592d-47c3-85ba-ead60edf7aca-console-config\") pod \"46ee42c1-592d-47c3-85ba-ead60edf7aca\" (UID: \"46ee42c1-592d-47c3-85ba-ead60edf7aca\") " Feb 03 07:22:40 crc kubenswrapper[4708]: I0203 07:22:40.737168 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46ee42c1-592d-47c3-85ba-ead60edf7aca-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "46ee42c1-592d-47c3-85ba-ead60edf7aca" (UID: "46ee42c1-592d-47c3-85ba-ead60edf7aca"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:22:40 crc kubenswrapper[4708]: I0203 07:22:40.737265 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46ee42c1-592d-47c3-85ba-ead60edf7aca-console-config" (OuterVolumeSpecName: "console-config") pod "46ee42c1-592d-47c3-85ba-ead60edf7aca" (UID: "46ee42c1-592d-47c3-85ba-ead60edf7aca"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:22:40 crc kubenswrapper[4708]: I0203 07:22:40.737957 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46ee42c1-592d-47c3-85ba-ead60edf7aca-service-ca" (OuterVolumeSpecName: "service-ca") pod "46ee42c1-592d-47c3-85ba-ead60edf7aca" (UID: "46ee42c1-592d-47c3-85ba-ead60edf7aca"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:22:40 crc kubenswrapper[4708]: I0203 07:22:40.738175 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46ee42c1-592d-47c3-85ba-ead60edf7aca-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "46ee42c1-592d-47c3-85ba-ead60edf7aca" (UID: "46ee42c1-592d-47c3-85ba-ead60edf7aca"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:22:40 crc kubenswrapper[4708]: I0203 07:22:40.749211 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46ee42c1-592d-47c3-85ba-ead60edf7aca-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "46ee42c1-592d-47c3-85ba-ead60edf7aca" (UID: "46ee42c1-592d-47c3-85ba-ead60edf7aca"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:22:40 crc kubenswrapper[4708]: I0203 07:22:40.749530 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46ee42c1-592d-47c3-85ba-ead60edf7aca-kube-api-access-8246l" (OuterVolumeSpecName: "kube-api-access-8246l") pod "46ee42c1-592d-47c3-85ba-ead60edf7aca" (UID: "46ee42c1-592d-47c3-85ba-ead60edf7aca"). InnerVolumeSpecName "kube-api-access-8246l". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:22:40 crc kubenswrapper[4708]: I0203 07:22:40.750340 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46ee42c1-592d-47c3-85ba-ead60edf7aca-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "46ee42c1-592d-47c3-85ba-ead60edf7aca" (UID: "46ee42c1-592d-47c3-85ba-ead60edf7aca"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:22:40 crc kubenswrapper[4708]: I0203 07:22:40.837261 4708 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/46ee42c1-592d-47c3-85ba-ead60edf7aca-console-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:22:40 crc kubenswrapper[4708]: I0203 07:22:40.838102 4708 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/46ee42c1-592d-47c3-85ba-ead60edf7aca-console-oauth-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:22:40 crc kubenswrapper[4708]: I0203 07:22:40.838125 4708 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/46ee42c1-592d-47c3-85ba-ead60edf7aca-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 03 07:22:40 crc kubenswrapper[4708]: I0203 07:22:40.838142 4708 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/46ee42c1-592d-47c3-85ba-ead60edf7aca-console-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:22:40 crc kubenswrapper[4708]: I0203 07:22:40.838163 4708 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/46ee42c1-592d-47c3-85ba-ead60edf7aca-service-ca\") on node \"crc\" DevicePath \"\"" Feb 03 07:22:40 crc kubenswrapper[4708]: I0203 07:22:40.838179 4708 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/46ee42c1-592d-47c3-85ba-ead60edf7aca-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:22:40 crc kubenswrapper[4708]: I0203 07:22:40.838195 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8246l\" (UniqueName: \"kubernetes.io/projected/46ee42c1-592d-47c3-85ba-ead60edf7aca-kube-api-access-8246l\") on node \"crc\" DevicePath \"\"" Feb 03 07:22:41 crc kubenswrapper[4708]: I0203 07:22:41.368213 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-zd8kn_46ee42c1-592d-47c3-85ba-ead60edf7aca/console/0.log" Feb 03 07:22:41 crc kubenswrapper[4708]: I0203 07:22:41.368288 4708 generic.go:334] "Generic (PLEG): container finished" podID="46ee42c1-592d-47c3-85ba-ead60edf7aca" containerID="cd92d92f054982bdf31a98c86bd7fe25b74e1231a5ec30c339dd5d26e7132711" exitCode=2 Feb 03 07:22:41 crc kubenswrapper[4708]: I0203 07:22:41.368332 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-zd8kn" event={"ID":"46ee42c1-592d-47c3-85ba-ead60edf7aca","Type":"ContainerDied","Data":"cd92d92f054982bdf31a98c86bd7fe25b74e1231a5ec30c339dd5d26e7132711"} Feb 03 07:22:41 crc kubenswrapper[4708]: I0203 07:22:41.368372 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-zd8kn" event={"ID":"46ee42c1-592d-47c3-85ba-ead60edf7aca","Type":"ContainerDied","Data":"b10290eea3eea1ee6ce16abf81570cc0e941ff5723c86c14615959e660bb82cd"} Feb 03 07:22:41 crc kubenswrapper[4708]: I0203 07:22:41.368425 4708 scope.go:117] "RemoveContainer" containerID="cd92d92f054982bdf31a98c86bd7fe25b74e1231a5ec30c339dd5d26e7132711" Feb 03 07:22:41 crc kubenswrapper[4708]: I0203 07:22:41.368652 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-zd8kn" Feb 03 07:22:41 crc kubenswrapper[4708]: I0203 07:22:41.410962 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-zd8kn"] Feb 03 07:22:41 crc kubenswrapper[4708]: I0203 07:22:41.415719 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-zd8kn"] Feb 03 07:22:41 crc kubenswrapper[4708]: I0203 07:22:41.484360 4708 scope.go:117] "RemoveContainer" containerID="cd92d92f054982bdf31a98c86bd7fe25b74e1231a5ec30c339dd5d26e7132711" Feb 03 07:22:41 crc kubenswrapper[4708]: E0203 07:22:41.485251 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd92d92f054982bdf31a98c86bd7fe25b74e1231a5ec30c339dd5d26e7132711\": container with ID starting with cd92d92f054982bdf31a98c86bd7fe25b74e1231a5ec30c339dd5d26e7132711 not found: ID does not exist" containerID="cd92d92f054982bdf31a98c86bd7fe25b74e1231a5ec30c339dd5d26e7132711" Feb 03 07:22:41 crc kubenswrapper[4708]: I0203 07:22:41.485297 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd92d92f054982bdf31a98c86bd7fe25b74e1231a5ec30c339dd5d26e7132711"} err="failed to get container status \"cd92d92f054982bdf31a98c86bd7fe25b74e1231a5ec30c339dd5d26e7132711\": rpc error: code = NotFound desc = could not find container \"cd92d92f054982bdf31a98c86bd7fe25b74e1231a5ec30c339dd5d26e7132711\": container with ID starting with cd92d92f054982bdf31a98c86bd7fe25b74e1231a5ec30c339dd5d26e7132711 not found: ID does not exist" Feb 03 07:22:42 crc kubenswrapper[4708]: I0203 07:22:42.101112 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46ee42c1-592d-47c3-85ba-ead60edf7aca" path="/var/lib/kubelet/pods/46ee42c1-592d-47c3-85ba-ead60edf7aca/volumes" Feb 03 07:22:42 crc kubenswrapper[4708]: I0203 07:22:42.376886 4708 generic.go:334] "Generic (PLEG): container finished" podID="402c1015-f80b-44cf-aab2-afd529531cfd" containerID="8b114927f33a13e3118e3a5494cb10bfc6a85186f0b11d6525f48cf4c073e47c" exitCode=0 Feb 03 07:22:42 crc kubenswrapper[4708]: I0203 07:22:42.376956 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf" event={"ID":"402c1015-f80b-44cf-aab2-afd529531cfd","Type":"ContainerDied","Data":"8b114927f33a13e3118e3a5494cb10bfc6a85186f0b11d6525f48cf4c073e47c"} Feb 03 07:22:43 crc kubenswrapper[4708]: I0203 07:22:43.389444 4708 generic.go:334] "Generic (PLEG): container finished" podID="402c1015-f80b-44cf-aab2-afd529531cfd" containerID="709668afdba219fd3b0afab269372a5e251a27254a48e22580c8b78dad0c9b5b" exitCode=0 Feb 03 07:22:43 crc kubenswrapper[4708]: I0203 07:22:43.390297 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf" event={"ID":"402c1015-f80b-44cf-aab2-afd529531cfd","Type":"ContainerDied","Data":"709668afdba219fd3b0afab269372a5e251a27254a48e22580c8b78dad0c9b5b"} Feb 03 07:22:44 crc kubenswrapper[4708]: I0203 07:22:44.670196 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf" Feb 03 07:22:44 crc kubenswrapper[4708]: I0203 07:22:44.690222 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/402c1015-f80b-44cf-aab2-afd529531cfd-bundle\") pod \"402c1015-f80b-44cf-aab2-afd529531cfd\" (UID: \"402c1015-f80b-44cf-aab2-afd529531cfd\") " Feb 03 07:22:44 crc kubenswrapper[4708]: I0203 07:22:44.690385 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/402c1015-f80b-44cf-aab2-afd529531cfd-util\") pod \"402c1015-f80b-44cf-aab2-afd529531cfd\" (UID: \"402c1015-f80b-44cf-aab2-afd529531cfd\") " Feb 03 07:22:44 crc kubenswrapper[4708]: I0203 07:22:44.690432 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tbcgc\" (UniqueName: \"kubernetes.io/projected/402c1015-f80b-44cf-aab2-afd529531cfd-kube-api-access-tbcgc\") pod \"402c1015-f80b-44cf-aab2-afd529531cfd\" (UID: \"402c1015-f80b-44cf-aab2-afd529531cfd\") " Feb 03 07:22:44 crc kubenswrapper[4708]: I0203 07:22:44.692114 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/402c1015-f80b-44cf-aab2-afd529531cfd-bundle" (OuterVolumeSpecName: "bundle") pod "402c1015-f80b-44cf-aab2-afd529531cfd" (UID: "402c1015-f80b-44cf-aab2-afd529531cfd"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:22:44 crc kubenswrapper[4708]: I0203 07:22:44.695865 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/402c1015-f80b-44cf-aab2-afd529531cfd-kube-api-access-tbcgc" (OuterVolumeSpecName: "kube-api-access-tbcgc") pod "402c1015-f80b-44cf-aab2-afd529531cfd" (UID: "402c1015-f80b-44cf-aab2-afd529531cfd"). InnerVolumeSpecName "kube-api-access-tbcgc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:22:44 crc kubenswrapper[4708]: I0203 07:22:44.792782 4708 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/402c1015-f80b-44cf-aab2-afd529531cfd-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:22:44 crc kubenswrapper[4708]: I0203 07:22:44.792852 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tbcgc\" (UniqueName: \"kubernetes.io/projected/402c1015-f80b-44cf-aab2-afd529531cfd-kube-api-access-tbcgc\") on node \"crc\" DevicePath \"\"" Feb 03 07:22:44 crc kubenswrapper[4708]: I0203 07:22:44.888682 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/402c1015-f80b-44cf-aab2-afd529531cfd-util" (OuterVolumeSpecName: "util") pod "402c1015-f80b-44cf-aab2-afd529531cfd" (UID: "402c1015-f80b-44cf-aab2-afd529531cfd"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:22:44 crc kubenswrapper[4708]: I0203 07:22:44.894129 4708 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/402c1015-f80b-44cf-aab2-afd529531cfd-util\") on node \"crc\" DevicePath \"\"" Feb 03 07:22:45 crc kubenswrapper[4708]: I0203 07:22:45.403896 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf" event={"ID":"402c1015-f80b-44cf-aab2-afd529531cfd","Type":"ContainerDied","Data":"e465ecbbdbe50f1a0ffa82e8dca73b344c948e40d08881d31b7f2ebf86d7a157"} Feb 03 07:22:45 crc kubenswrapper[4708]: I0203 07:22:45.404292 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e465ecbbdbe50f1a0ffa82e8dca73b344c948e40d08881d31b7f2ebf86d7a157" Feb 03 07:22:45 crc kubenswrapper[4708]: I0203 07:22:45.404190 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf" Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.491582 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-7f7b788fc7-dmpn2"] Feb 03 07:22:53 crc kubenswrapper[4708]: E0203 07:22:53.492412 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="402c1015-f80b-44cf-aab2-afd529531cfd" containerName="util" Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.492431 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="402c1015-f80b-44cf-aab2-afd529531cfd" containerName="util" Feb 03 07:22:53 crc kubenswrapper[4708]: E0203 07:22:53.492473 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46ee42c1-592d-47c3-85ba-ead60edf7aca" containerName="console" Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.492482 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="46ee42c1-592d-47c3-85ba-ead60edf7aca" containerName="console" Feb 03 07:22:53 crc kubenswrapper[4708]: E0203 07:22:53.492496 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="402c1015-f80b-44cf-aab2-afd529531cfd" containerName="extract" Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.492505 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="402c1015-f80b-44cf-aab2-afd529531cfd" containerName="extract" Feb 03 07:22:53 crc kubenswrapper[4708]: E0203 07:22:53.492514 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="402c1015-f80b-44cf-aab2-afd529531cfd" containerName="pull" Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.492520 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="402c1015-f80b-44cf-aab2-afd529531cfd" containerName="pull" Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.492642 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="402c1015-f80b-44cf-aab2-afd529531cfd" containerName="extract" Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.492654 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="46ee42c1-592d-47c3-85ba-ead60edf7aca" containerName="console" Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.493157 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-7f7b788fc7-dmpn2" Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.495030 4708 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-z5fwz" Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.495454 4708 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.497320 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.497786 4708 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.502191 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.520074 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7f7b788fc7-dmpn2"] Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.603221 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rpv7l\" (UniqueName: \"kubernetes.io/projected/c67268c0-d17c-4659-829e-2865b70963f0-kube-api-access-rpv7l\") pod \"metallb-operator-controller-manager-7f7b788fc7-dmpn2\" (UID: \"c67268c0-d17c-4659-829e-2865b70963f0\") " pod="metallb-system/metallb-operator-controller-manager-7f7b788fc7-dmpn2" Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.603270 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c67268c0-d17c-4659-829e-2865b70963f0-webhook-cert\") pod \"metallb-operator-controller-manager-7f7b788fc7-dmpn2\" (UID: \"c67268c0-d17c-4659-829e-2865b70963f0\") " pod="metallb-system/metallb-operator-controller-manager-7f7b788fc7-dmpn2" Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.603466 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c67268c0-d17c-4659-829e-2865b70963f0-apiservice-cert\") pod \"metallb-operator-controller-manager-7f7b788fc7-dmpn2\" (UID: \"c67268c0-d17c-4659-829e-2865b70963f0\") " pod="metallb-system/metallb-operator-controller-manager-7f7b788fc7-dmpn2" Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.704300 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c67268c0-d17c-4659-829e-2865b70963f0-apiservice-cert\") pod \"metallb-operator-controller-manager-7f7b788fc7-dmpn2\" (UID: \"c67268c0-d17c-4659-829e-2865b70963f0\") " pod="metallb-system/metallb-operator-controller-manager-7f7b788fc7-dmpn2" Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.704877 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rpv7l\" (UniqueName: \"kubernetes.io/projected/c67268c0-d17c-4659-829e-2865b70963f0-kube-api-access-rpv7l\") pod \"metallb-operator-controller-manager-7f7b788fc7-dmpn2\" (UID: \"c67268c0-d17c-4659-829e-2865b70963f0\") " pod="metallb-system/metallb-operator-controller-manager-7f7b788fc7-dmpn2" Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.704998 
4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c67268c0-d17c-4659-829e-2865b70963f0-webhook-cert\") pod \"metallb-operator-controller-manager-7f7b788fc7-dmpn2\" (UID: \"c67268c0-d17c-4659-829e-2865b70963f0\") " pod="metallb-system/metallb-operator-controller-manager-7f7b788fc7-dmpn2" Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.711459 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c67268c0-d17c-4659-829e-2865b70963f0-apiservice-cert\") pod \"metallb-operator-controller-manager-7f7b788fc7-dmpn2\" (UID: \"c67268c0-d17c-4659-829e-2865b70963f0\") " pod="metallb-system/metallb-operator-controller-manager-7f7b788fc7-dmpn2" Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.717518 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c67268c0-d17c-4659-829e-2865b70963f0-webhook-cert\") pod \"metallb-operator-controller-manager-7f7b788fc7-dmpn2\" (UID: \"c67268c0-d17c-4659-829e-2865b70963f0\") " pod="metallb-system/metallb-operator-controller-manager-7f7b788fc7-dmpn2" Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.737165 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-7d65755cd8-mqk5v"] Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.738029 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-7d65755cd8-mqk5v" Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.739600 4708 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.739602 4708 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.740021 4708 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-zr4d5" Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.748539 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rpv7l\" (UniqueName: \"kubernetes.io/projected/c67268c0-d17c-4659-829e-2865b70963f0-kube-api-access-rpv7l\") pod \"metallb-operator-controller-manager-7f7b788fc7-dmpn2\" (UID: \"c67268c0-d17c-4659-829e-2865b70963f0\") " pod="metallb-system/metallb-operator-controller-manager-7f7b788fc7-dmpn2" Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.753450 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-7d65755cd8-mqk5v"] Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.811435 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-7f7b788fc7-dmpn2" Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.907873 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/85431c28-b637-4bab-b63b-982307ac860c-apiservice-cert\") pod \"metallb-operator-webhook-server-7d65755cd8-mqk5v\" (UID: \"85431c28-b637-4bab-b63b-982307ac860c\") " pod="metallb-system/metallb-operator-webhook-server-7d65755cd8-mqk5v" Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.907946 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzjbk\" (UniqueName: \"kubernetes.io/projected/85431c28-b637-4bab-b63b-982307ac860c-kube-api-access-mzjbk\") pod \"metallb-operator-webhook-server-7d65755cd8-mqk5v\" (UID: \"85431c28-b637-4bab-b63b-982307ac860c\") " pod="metallb-system/metallb-operator-webhook-server-7d65755cd8-mqk5v" Feb 03 07:22:53 crc kubenswrapper[4708]: I0203 07:22:53.907975 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/85431c28-b637-4bab-b63b-982307ac860c-webhook-cert\") pod \"metallb-operator-webhook-server-7d65755cd8-mqk5v\" (UID: \"85431c28-b637-4bab-b63b-982307ac860c\") " pod="metallb-system/metallb-operator-webhook-server-7d65755cd8-mqk5v" Feb 03 07:22:54 crc kubenswrapper[4708]: I0203 07:22:54.009677 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/85431c28-b637-4bab-b63b-982307ac860c-apiservice-cert\") pod \"metallb-operator-webhook-server-7d65755cd8-mqk5v\" (UID: \"85431c28-b637-4bab-b63b-982307ac860c\") " pod="metallb-system/metallb-operator-webhook-server-7d65755cd8-mqk5v" Feb 03 07:22:54 crc kubenswrapper[4708]: I0203 07:22:54.010021 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzjbk\" (UniqueName: \"kubernetes.io/projected/85431c28-b637-4bab-b63b-982307ac860c-kube-api-access-mzjbk\") pod \"metallb-operator-webhook-server-7d65755cd8-mqk5v\" (UID: \"85431c28-b637-4bab-b63b-982307ac860c\") " pod="metallb-system/metallb-operator-webhook-server-7d65755cd8-mqk5v" Feb 03 07:22:54 crc kubenswrapper[4708]: I0203 07:22:54.010043 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/85431c28-b637-4bab-b63b-982307ac860c-webhook-cert\") pod \"metallb-operator-webhook-server-7d65755cd8-mqk5v\" (UID: \"85431c28-b637-4bab-b63b-982307ac860c\") " pod="metallb-system/metallb-operator-webhook-server-7d65755cd8-mqk5v" Feb 03 07:22:54 crc kubenswrapper[4708]: I0203 07:22:54.015676 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/85431c28-b637-4bab-b63b-982307ac860c-webhook-cert\") pod \"metallb-operator-webhook-server-7d65755cd8-mqk5v\" (UID: \"85431c28-b637-4bab-b63b-982307ac860c\") " pod="metallb-system/metallb-operator-webhook-server-7d65755cd8-mqk5v" Feb 03 07:22:54 crc kubenswrapper[4708]: I0203 07:22:54.016220 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/85431c28-b637-4bab-b63b-982307ac860c-apiservice-cert\") pod \"metallb-operator-webhook-server-7d65755cd8-mqk5v\" (UID: \"85431c28-b637-4bab-b63b-982307ac860c\") " 
pod="metallb-system/metallb-operator-webhook-server-7d65755cd8-mqk5v" Feb 03 07:22:54 crc kubenswrapper[4708]: I0203 07:22:54.026709 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzjbk\" (UniqueName: \"kubernetes.io/projected/85431c28-b637-4bab-b63b-982307ac860c-kube-api-access-mzjbk\") pod \"metallb-operator-webhook-server-7d65755cd8-mqk5v\" (UID: \"85431c28-b637-4bab-b63b-982307ac860c\") " pod="metallb-system/metallb-operator-webhook-server-7d65755cd8-mqk5v" Feb 03 07:22:54 crc kubenswrapper[4708]: I0203 07:22:54.093227 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-7d65755cd8-mqk5v" Feb 03 07:22:54 crc kubenswrapper[4708]: I0203 07:22:54.267339 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7f7b788fc7-dmpn2"] Feb 03 07:22:54 crc kubenswrapper[4708]: I0203 07:22:54.416095 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-7d65755cd8-mqk5v"] Feb 03 07:22:54 crc kubenswrapper[4708]: W0203 07:22:54.437824 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85431c28_b637_4bab_b63b_982307ac860c.slice/crio-8c3569b2df42a64ffe2d6bc6384c27d8bec04545d4b8f181c19d72e82fb81543 WatchSource:0}: Error finding container 8c3569b2df42a64ffe2d6bc6384c27d8bec04545d4b8f181c19d72e82fb81543: Status 404 returned error can't find the container with id 8c3569b2df42a64ffe2d6bc6384c27d8bec04545d4b8f181c19d72e82fb81543 Feb 03 07:22:54 crc kubenswrapper[4708]: I0203 07:22:54.462818 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7f7b788fc7-dmpn2" event={"ID":"c67268c0-d17c-4659-829e-2865b70963f0","Type":"ContainerStarted","Data":"2159a2e6ddb0d1c364b5a5726e8526575b7f10ff6d9ffa7185c53f9ecc41bdfe"} Feb 03 07:22:54 crc kubenswrapper[4708]: I0203 07:22:54.464076 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-7d65755cd8-mqk5v" event={"ID":"85431c28-b637-4bab-b63b-982307ac860c","Type":"ContainerStarted","Data":"8c3569b2df42a64ffe2d6bc6384c27d8bec04545d4b8f181c19d72e82fb81543"} Feb 03 07:22:56 crc kubenswrapper[4708]: I0203 07:22:56.682623 4708 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Feb 03 07:22:58 crc kubenswrapper[4708]: I0203 07:22:58.501864 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7f7b788fc7-dmpn2" event={"ID":"c67268c0-d17c-4659-829e-2865b70963f0","Type":"ContainerStarted","Data":"351e3e49ee6c95aa9485ff6d2e821a0a8a67c519eae044d76fea911df0ca9176"} Feb 03 07:22:58 crc kubenswrapper[4708]: I0203 07:22:58.502245 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-7f7b788fc7-dmpn2" Feb 03 07:22:58 crc kubenswrapper[4708]: I0203 07:22:58.529268 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-7f7b788fc7-dmpn2" podStartSLOduration=2.232223079 podStartE2EDuration="5.529250164s" podCreationTimestamp="2026-02-03 07:22:53 +0000 UTC" firstStartedPulling="2026-02-03 07:22:54.285028074 +0000 UTC m=+753.266974881" lastFinishedPulling="2026-02-03 07:22:57.582055159 +0000 UTC 
m=+756.564001966" observedRunningTime="2026-02-03 07:22:58.523105944 +0000 UTC m=+757.505052751" watchObservedRunningTime="2026-02-03 07:22:58.529250164 +0000 UTC m=+757.511196971" Feb 03 07:22:59 crc kubenswrapper[4708]: I0203 07:22:59.507837 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-7d65755cd8-mqk5v" event={"ID":"85431c28-b637-4bab-b63b-982307ac860c","Type":"ContainerStarted","Data":"8e960ba37e9c443fc7efb3420da876ffa8241055f1493e943d3107b8693e11d1"} Feb 03 07:22:59 crc kubenswrapper[4708]: I0203 07:22:59.529556 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-7d65755cd8-mqk5v" podStartSLOduration=1.851973141 podStartE2EDuration="6.529536869s" podCreationTimestamp="2026-02-03 07:22:53 +0000 UTC" firstStartedPulling="2026-02-03 07:22:54.440688064 +0000 UTC m=+753.422634871" lastFinishedPulling="2026-02-03 07:22:59.118251792 +0000 UTC m=+758.100198599" observedRunningTime="2026-02-03 07:22:59.525944491 +0000 UTC m=+758.507891298" watchObservedRunningTime="2026-02-03 07:22:59.529536869 +0000 UTC m=+758.511483676" Feb 03 07:23:00 crc kubenswrapper[4708]: I0203 07:23:00.513735 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-7d65755cd8-mqk5v" Feb 03 07:23:14 crc kubenswrapper[4708]: I0203 07:23:14.100142 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-7d65755cd8-mqk5v" Feb 03 07:23:23 crc kubenswrapper[4708]: I0203 07:23:23.833379 4708 patch_prober.go:28] interesting pod/machine-config-daemon-r94bn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:23:23 crc kubenswrapper[4708]: I0203 07:23:23.834126 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:23:33 crc kubenswrapper[4708]: I0203 07:23:33.814526 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-7f7b788fc7-dmpn2" Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.563844 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-xs54b"] Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.570515 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-xs54b" Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.571349 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-cd7sq"] Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.574276 4708 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-ktntv" Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.574844 4708 util.go:30] "No sandbox for pod can be found. 
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.578097 4708 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.578152 4708 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.578289 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.583470 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-xs54b"]
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.660553 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-hcz88"]
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.661548 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-hcz88"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.666861 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-6968d8fdc4-cbjxc"]
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.667882 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6968d8fdc4-cbjxc"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.669245 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.669479 4708 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.669637 4708 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.669892 4708 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.670051 4708 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-ghpxg"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.672377 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/a0d5231d-187e-49dd-8f33-12613dd34c3b-frr-sockets\") pod \"frr-k8s-cd7sq\" (UID: \"a0d5231d-187e-49dd-8f33-12613dd34c3b\") " pod="metallb-system/frr-k8s-cd7sq"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.672423 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/a0d5231d-187e-49dd-8f33-12613dd34c3b-frr-conf\") pod \"frr-k8s-cd7sq\" (UID: \"a0d5231d-187e-49dd-8f33-12613dd34c3b\") " pod="metallb-system/frr-k8s-cd7sq"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.672450 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/a0d5231d-187e-49dd-8f33-12613dd34c3b-metrics\") pod \"frr-k8s-cd7sq\" (UID: \"a0d5231d-187e-49dd-8f33-12613dd34c3b\") " pod="metallb-system/frr-k8s-cd7sq"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.672472 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/a0d5231d-187e-49dd-8f33-12613dd34c3b-reloader\") pod \"frr-k8s-cd7sq\" (UID: \"a0d5231d-187e-49dd-8f33-12613dd34c3b\") " pod="metallb-system/frr-k8s-cd7sq"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.672567 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/a0d5231d-187e-49dd-8f33-12613dd34c3b-frr-startup\") pod \"frr-k8s-cd7sq\" (UID: \"a0d5231d-187e-49dd-8f33-12613dd34c3b\") " pod="metallb-system/frr-k8s-cd7sq"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.672622 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2l45\" (UniqueName: \"kubernetes.io/projected/a0d5231d-187e-49dd-8f33-12613dd34c3b-kube-api-access-b2l45\") pod \"frr-k8s-cd7sq\" (UID: \"a0d5231d-187e-49dd-8f33-12613dd34c3b\") " pod="metallb-system/frr-k8s-cd7sq"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.672664 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20c5389c-b542-4620-ac99-0ecfb0ae7720-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-xs54b\" (UID: \"20c5389c-b542-4620-ac99-0ecfb0ae7720\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-xs54b"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.672711 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24s8k\" (UniqueName: \"kubernetes.io/projected/20c5389c-b542-4620-ac99-0ecfb0ae7720-kube-api-access-24s8k\") pod \"frr-k8s-webhook-server-7df86c4f6c-xs54b\" (UID: \"20c5389c-b542-4620-ac99-0ecfb0ae7720\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-xs54b"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.672759 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a0d5231d-187e-49dd-8f33-12613dd34c3b-metrics-certs\") pod \"frr-k8s-cd7sq\" (UID: \"a0d5231d-187e-49dd-8f33-12613dd34c3b\") " pod="metallb-system/frr-k8s-cd7sq"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.687074 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-cbjxc"]
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.774282 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/a0d5231d-187e-49dd-8f33-12613dd34c3b-reloader\") pod \"frr-k8s-cd7sq\" (UID: \"a0d5231d-187e-49dd-8f33-12613dd34c3b\") " pod="metallb-system/frr-k8s-cd7sq"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.774338 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g66z8\" (UniqueName: \"kubernetes.io/projected/fae1d180-0dfb-4f07-922c-5b158d2ebcd3-kube-api-access-g66z8\") pod \"controller-6968d8fdc4-cbjxc\" (UID: \"fae1d180-0dfb-4f07-922c-5b158d2ebcd3\") " pod="metallb-system/controller-6968d8fdc4-cbjxc"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.774375 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/a0d5231d-187e-49dd-8f33-12613dd34c3b-frr-startup\") pod \"frr-k8s-cd7sq\" (UID: \"a0d5231d-187e-49dd-8f33-12613dd34c3b\") " pod="metallb-system/frr-k8s-cd7sq"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.774477 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2l45\" (UniqueName: \"kubernetes.io/projected/a0d5231d-187e-49dd-8f33-12613dd34c3b-kube-api-access-b2l45\") pod \"frr-k8s-cd7sq\" (UID: \"a0d5231d-187e-49dd-8f33-12613dd34c3b\") " pod="metallb-system/frr-k8s-cd7sq"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.774531 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r877j\" (UniqueName: \"kubernetes.io/projected/31740186-1408-414d-86ee-66b5f2219175-kube-api-access-r877j\") pod \"speaker-hcz88\" (UID: \"31740186-1408-414d-86ee-66b5f2219175\") " pod="metallb-system/speaker-hcz88"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.774570 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fae1d180-0dfb-4f07-922c-5b158d2ebcd3-cert\") pod \"controller-6968d8fdc4-cbjxc\" (UID: \"fae1d180-0dfb-4f07-922c-5b158d2ebcd3\") " pod="metallb-system/controller-6968d8fdc4-cbjxc"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.774613 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20c5389c-b542-4620-ac99-0ecfb0ae7720-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-xs54b\" (UID: \"20c5389c-b542-4620-ac99-0ecfb0ae7720\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-xs54b"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.774694 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/31740186-1408-414d-86ee-66b5f2219175-metallb-excludel2\") pod \"speaker-hcz88\" (UID: \"31740186-1408-414d-86ee-66b5f2219175\") " pod="metallb-system/speaker-hcz88"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.774740 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24s8k\" (UniqueName: \"kubernetes.io/projected/20c5389c-b542-4620-ac99-0ecfb0ae7720-kube-api-access-24s8k\") pod \"frr-k8s-webhook-server-7df86c4f6c-xs54b\" (UID: \"20c5389c-b542-4620-ac99-0ecfb0ae7720\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-xs54b"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.774772 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a0d5231d-187e-49dd-8f33-12613dd34c3b-metrics-certs\") pod \"frr-k8s-cd7sq\" (UID: \"a0d5231d-187e-49dd-8f33-12613dd34c3b\") " pod="metallb-system/frr-k8s-cd7sq"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.774833 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/31740186-1408-414d-86ee-66b5f2219175-memberlist\") pod \"speaker-hcz88\" (UID: \"31740186-1408-414d-86ee-66b5f2219175\") " pod="metallb-system/speaker-hcz88"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.774867 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/a0d5231d-187e-49dd-8f33-12613dd34c3b-frr-sockets\") pod \"frr-k8s-cd7sq\" (UID: \"a0d5231d-187e-49dd-8f33-12613dd34c3b\") " pod="metallb-system/frr-k8s-cd7sq"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.774977 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/a0d5231d-187e-49dd-8f33-12613dd34c3b-frr-conf\") pod \"frr-k8s-cd7sq\" (UID: \"a0d5231d-187e-49dd-8f33-12613dd34c3b\") " pod="metallb-system/frr-k8s-cd7sq"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.775029 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/a0d5231d-187e-49dd-8f33-12613dd34c3b-reloader\") pod \"frr-k8s-cd7sq\" (UID: \"a0d5231d-187e-49dd-8f33-12613dd34c3b\") " pod="metallb-system/frr-k8s-cd7sq"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.775027 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/31740186-1408-414d-86ee-66b5f2219175-metrics-certs\") pod \"speaker-hcz88\" (UID: \"31740186-1408-414d-86ee-66b5f2219175\") " pod="metallb-system/speaker-hcz88"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.775114 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/a0d5231d-187e-49dd-8f33-12613dd34c3b-metrics\") pod \"frr-k8s-cd7sq\" (UID: \"a0d5231d-187e-49dd-8f33-12613dd34c3b\") " pod="metallb-system/frr-k8s-cd7sq"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.775166 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fae1d180-0dfb-4f07-922c-5b158d2ebcd3-metrics-certs\") pod \"controller-6968d8fdc4-cbjxc\" (UID: \"fae1d180-0dfb-4f07-922c-5b158d2ebcd3\") " pod="metallb-system/controller-6968d8fdc4-cbjxc"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.775193 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/a0d5231d-187e-49dd-8f33-12613dd34c3b-frr-sockets\") pod \"frr-k8s-cd7sq\" (UID: \"a0d5231d-187e-49dd-8f33-12613dd34c3b\") " pod="metallb-system/frr-k8s-cd7sq"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.775571 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/a0d5231d-187e-49dd-8f33-12613dd34c3b-metrics\") pod \"frr-k8s-cd7sq\" (UID: \"a0d5231d-187e-49dd-8f33-12613dd34c3b\") " pod="metallb-system/frr-k8s-cd7sq"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.775616 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/a0d5231d-187e-49dd-8f33-12613dd34c3b-frr-conf\") pod \"frr-k8s-cd7sq\" (UID: \"a0d5231d-187e-49dd-8f33-12613dd34c3b\") " pod="metallb-system/frr-k8s-cd7sq"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.776296 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/a0d5231d-187e-49dd-8f33-12613dd34c3b-frr-startup\") pod \"frr-k8s-cd7sq\" (UID: \"a0d5231d-187e-49dd-8f33-12613dd34c3b\") " pod="metallb-system/frr-k8s-cd7sq"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.780468 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a0d5231d-187e-49dd-8f33-12613dd34c3b-metrics-certs\") pod \"frr-k8s-cd7sq\" (UID: \"a0d5231d-187e-49dd-8f33-12613dd34c3b\") " pod="metallb-system/frr-k8s-cd7sq"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.783423 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20c5389c-b542-4620-ac99-0ecfb0ae7720-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-xs54b\" (UID: \"20c5389c-b542-4620-ac99-0ecfb0ae7720\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-xs54b"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.791859 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24s8k\" (UniqueName: \"kubernetes.io/projected/20c5389c-b542-4620-ac99-0ecfb0ae7720-kube-api-access-24s8k\") pod \"frr-k8s-webhook-server-7df86c4f6c-xs54b\" (UID: \"20c5389c-b542-4620-ac99-0ecfb0ae7720\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-xs54b"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.792555 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2l45\" (UniqueName: \"kubernetes.io/projected/a0d5231d-187e-49dd-8f33-12613dd34c3b-kube-api-access-b2l45\") pod \"frr-k8s-cd7sq\" (UID: \"a0d5231d-187e-49dd-8f33-12613dd34c3b\") " pod="metallb-system/frr-k8s-cd7sq"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.876471 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/31740186-1408-414d-86ee-66b5f2219175-metrics-certs\") pod \"speaker-hcz88\" (UID: \"31740186-1408-414d-86ee-66b5f2219175\") " pod="metallb-system/speaker-hcz88"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.876539 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fae1d180-0dfb-4f07-922c-5b158d2ebcd3-metrics-certs\") pod \"controller-6968d8fdc4-cbjxc\" (UID: \"fae1d180-0dfb-4f07-922c-5b158d2ebcd3\") " pod="metallb-system/controller-6968d8fdc4-cbjxc"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.876579 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g66z8\" (UniqueName: \"kubernetes.io/projected/fae1d180-0dfb-4f07-922c-5b158d2ebcd3-kube-api-access-g66z8\") pod \"controller-6968d8fdc4-cbjxc\" (UID: \"fae1d180-0dfb-4f07-922c-5b158d2ebcd3\") " pod="metallb-system/controller-6968d8fdc4-cbjxc"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.876616 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r877j\" (UniqueName: \"kubernetes.io/projected/31740186-1408-414d-86ee-66b5f2219175-kube-api-access-r877j\") pod \"speaker-hcz88\" (UID: \"31740186-1408-414d-86ee-66b5f2219175\") " pod="metallb-system/speaker-hcz88"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.876640 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fae1d180-0dfb-4f07-922c-5b158d2ebcd3-cert\") pod \"controller-6968d8fdc4-cbjxc\" (UID: \"fae1d180-0dfb-4f07-922c-5b158d2ebcd3\") " pod="metallb-system/controller-6968d8fdc4-cbjxc"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.876687 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/31740186-1408-414d-86ee-66b5f2219175-metallb-excludel2\") pod \"speaker-hcz88\" (UID: \"31740186-1408-414d-86ee-66b5f2219175\") " pod="metallb-system/speaker-hcz88"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.876715 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/31740186-1408-414d-86ee-66b5f2219175-memberlist\") pod \"speaker-hcz88\" (UID: \"31740186-1408-414d-86ee-66b5f2219175\") " pod="metallb-system/speaker-hcz88"
Feb 03 07:23:34 crc kubenswrapper[4708]: E0203 07:23:34.876859 4708 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Feb 03 07:23:34 crc kubenswrapper[4708]: E0203 07:23:34.876908 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/31740186-1408-414d-86ee-66b5f2219175-memberlist podName:31740186-1408-414d-86ee-66b5f2219175 nodeName:}" failed. No retries permitted until 2026-02-03 07:23:35.376891669 +0000 UTC m=+794.358838476 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/31740186-1408-414d-86ee-66b5f2219175-memberlist") pod "speaker-hcz88" (UID: "31740186-1408-414d-86ee-66b5f2219175") : secret "metallb-memberlist" not found
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.877696 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/31740186-1408-414d-86ee-66b5f2219175-metallb-excludel2\") pod \"speaker-hcz88\" (UID: \"31740186-1408-414d-86ee-66b5f2219175\") " pod="metallb-system/speaker-hcz88"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.881082 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fae1d180-0dfb-4f07-922c-5b158d2ebcd3-metrics-certs\") pod \"controller-6968d8fdc4-cbjxc\" (UID: \"fae1d180-0dfb-4f07-922c-5b158d2ebcd3\") " pod="metallb-system/controller-6968d8fdc4-cbjxc"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.889338 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/31740186-1408-414d-86ee-66b5f2219175-metrics-certs\") pod \"speaker-hcz88\" (UID: \"31740186-1408-414d-86ee-66b5f2219175\") " pod="metallb-system/speaker-hcz88"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.889417 4708 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.891450 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-xs54b"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.895262 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g66z8\" (UniqueName: \"kubernetes.io/projected/fae1d180-0dfb-4f07-922c-5b158d2ebcd3-kube-api-access-g66z8\") pod \"controller-6968d8fdc4-cbjxc\" (UID: \"fae1d180-0dfb-4f07-922c-5b158d2ebcd3\") " pod="metallb-system/controller-6968d8fdc4-cbjxc"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.900456 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fae1d180-0dfb-4f07-922c-5b158d2ebcd3-cert\") pod \"controller-6968d8fdc4-cbjxc\" (UID: \"fae1d180-0dfb-4f07-922c-5b158d2ebcd3\") " pod="metallb-system/controller-6968d8fdc4-cbjxc"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.903093 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-cd7sq"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.905447 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r877j\" (UniqueName: \"kubernetes.io/projected/31740186-1408-414d-86ee-66b5f2219175-kube-api-access-r877j\") pod \"speaker-hcz88\" (UID: \"31740186-1408-414d-86ee-66b5f2219175\") " pod="metallb-system/speaker-hcz88"
Feb 03 07:23:34 crc kubenswrapper[4708]: I0203 07:23:34.987590 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6968d8fdc4-cbjxc"
Feb 03 07:23:35 crc kubenswrapper[4708]: I0203 07:23:35.218811 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-cbjxc"]
Feb 03 07:23:35 crc kubenswrapper[4708]: I0203 07:23:35.309590 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-xs54b"]
Feb 03 07:23:35 crc kubenswrapper[4708]: W0203 07:23:35.313563 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod20c5389c_b542_4620_ac99_0ecfb0ae7720.slice/crio-2396106d13a82aad73fc28c839dfe3525a3b1c34694574ec921977048cef7ce0 WatchSource:0}: Error finding container 2396106d13a82aad73fc28c839dfe3525a3b1c34694574ec921977048cef7ce0: Status 404 returned error can't find the container with id 2396106d13a82aad73fc28c839dfe3525a3b1c34694574ec921977048cef7ce0
Feb 03 07:23:35 crc kubenswrapper[4708]: I0203 07:23:35.389426 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/31740186-1408-414d-86ee-66b5f2219175-memberlist\") pod \"speaker-hcz88\" (UID: \"31740186-1408-414d-86ee-66b5f2219175\") " pod="metallb-system/speaker-hcz88"
Feb 03 07:23:35 crc kubenswrapper[4708]: E0203 07:23:35.389623 4708 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Feb 03 07:23:35 crc kubenswrapper[4708]: E0203 07:23:35.389689 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/31740186-1408-414d-86ee-66b5f2219175-memberlist podName:31740186-1408-414d-86ee-66b5f2219175 nodeName:}" failed. No retries permitted until 2026-02-03 07:23:36.389673975 +0000 UTC m=+795.371620772 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/31740186-1408-414d-86ee-66b5f2219175-memberlist") pod "speaker-hcz88" (UID: "31740186-1408-414d-86ee-66b5f2219175") : secret "metallb-memberlist" not found
Feb 03 07:23:35 crc kubenswrapper[4708]: I0203 07:23:35.729382 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-cd7sq" event={"ID":"a0d5231d-187e-49dd-8f33-12613dd34c3b","Type":"ContainerStarted","Data":"16da28f72db7021da9319c898c2834ccf9d451d646fb51aabd798665354b8466"}
Feb 03 07:23:35 crc kubenswrapper[4708]: I0203 07:23:35.731173 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-cbjxc" event={"ID":"fae1d180-0dfb-4f07-922c-5b158d2ebcd3","Type":"ContainerStarted","Data":"8fdcba0081c9bde616699fe27e8a5de39102fa1ec296400ccdaee156a9f77632"}
Feb 03 07:23:35 crc kubenswrapper[4708]: I0203 07:23:35.731207 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-cbjxc" event={"ID":"fae1d180-0dfb-4f07-922c-5b158d2ebcd3","Type":"ContainerStarted","Data":"6543bb0704fe84bff6dcb6040f911f5602073d2ab9321379d006448caa5a7846"}
Feb 03 07:23:35 crc kubenswrapper[4708]: I0203 07:23:35.731230 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-cbjxc" event={"ID":"fae1d180-0dfb-4f07-922c-5b158d2ebcd3","Type":"ContainerStarted","Data":"872a6f56278d447dcafbc0c55a57ce2b11b24eebc3a34fe7618396e166bb480c"}
Feb 03 07:23:35 crc kubenswrapper[4708]: I0203 07:23:35.731286 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-6968d8fdc4-cbjxc"
Feb 03 07:23:35 crc kubenswrapper[4708]: I0203 07:23:35.732065 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-xs54b" event={"ID":"20c5389c-b542-4620-ac99-0ecfb0ae7720","Type":"ContainerStarted","Data":"2396106d13a82aad73fc28c839dfe3525a3b1c34694574ec921977048cef7ce0"}
Feb 03 07:23:35 crc kubenswrapper[4708]: I0203 07:23:35.757775 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-6968d8fdc4-cbjxc" podStartSLOduration=1.757722519 podStartE2EDuration="1.757722519s" podCreationTimestamp="2026-02-03 07:23:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:23:35.747220837 +0000 UTC m=+794.729167674" watchObservedRunningTime="2026-02-03 07:23:35.757722519 +0000 UTC m=+794.739669326"
Feb 03 07:23:36 crc kubenswrapper[4708]: I0203 07:23:36.402438 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/31740186-1408-414d-86ee-66b5f2219175-memberlist\") pod \"speaker-hcz88\" (UID: \"31740186-1408-414d-86ee-66b5f2219175\") " pod="metallb-system/speaker-hcz88"
Feb 03 07:23:36 crc kubenswrapper[4708]: I0203 07:23:36.408390 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/31740186-1408-414d-86ee-66b5f2219175-memberlist\") pod \"speaker-hcz88\" (UID: \"31740186-1408-414d-86ee-66b5f2219175\") " pod="metallb-system/speaker-hcz88"
Feb 03 07:23:36 crc kubenswrapper[4708]: I0203 07:23:36.479679 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-hcz88"
Feb 03 07:23:36 crc kubenswrapper[4708]: W0203 07:23:36.544856 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod31740186_1408_414d_86ee_66b5f2219175.slice/crio-677996433f32eb782acd9edf98e8d187a72f92b1ce7966c178e881373663f1a8 WatchSource:0}: Error finding container 677996433f32eb782acd9edf98e8d187a72f92b1ce7966c178e881373663f1a8: Status 404 returned error can't find the container with id 677996433f32eb782acd9edf98e8d187a72f92b1ce7966c178e881373663f1a8
Feb 03 07:23:36 crc kubenswrapper[4708]: I0203 07:23:36.743912 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-hcz88" event={"ID":"31740186-1408-414d-86ee-66b5f2219175","Type":"ContainerStarted","Data":"677996433f32eb782acd9edf98e8d187a72f92b1ce7966c178e881373663f1a8"}
Feb 03 07:23:37 crc kubenswrapper[4708]: I0203 07:23:37.754939 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-hcz88" event={"ID":"31740186-1408-414d-86ee-66b5f2219175","Type":"ContainerStarted","Data":"84c0d9e684161dc429d99999eab79addd6c2206255431b43463dd7ebda1cf9ba"}
Feb 03 07:23:37 crc kubenswrapper[4708]: I0203 07:23:37.755302 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-hcz88" event={"ID":"31740186-1408-414d-86ee-66b5f2219175","Type":"ContainerStarted","Data":"cc77aac96f8420c2f05faae9111b3796ad987be02c8628b999d8c3ac8590846f"}
Feb 03 07:23:37 crc kubenswrapper[4708]: I0203 07:23:37.755322 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-hcz88"
Feb 03 07:23:37 crc kubenswrapper[4708]: I0203 07:23:37.772522 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-hcz88" podStartSLOduration=3.772504932 podStartE2EDuration="3.772504932s" podCreationTimestamp="2026-02-03 07:23:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:23:37.768606629 +0000 UTC m=+796.750553436" watchObservedRunningTime="2026-02-03 07:23:37.772504932 +0000 UTC m=+796.754451729"
Feb 03 07:23:42 crc kubenswrapper[4708]: I0203 07:23:42.798541 4708 generic.go:334] "Generic (PLEG): container finished" podID="a0d5231d-187e-49dd-8f33-12613dd34c3b" containerID="d2625736456723ad6d53bf6aae12962d952d4dd6b62f0f5a91a978ad98c50e34" exitCode=0
Feb 03 07:23:42 crc kubenswrapper[4708]: I0203 07:23:42.799172 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-cd7sq" event={"ID":"a0d5231d-187e-49dd-8f33-12613dd34c3b","Type":"ContainerDied","Data":"d2625736456723ad6d53bf6aae12962d952d4dd6b62f0f5a91a978ad98c50e34"}
Feb 03 07:23:42 crc kubenswrapper[4708]: I0203 07:23:42.801485 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-xs54b" event={"ID":"20c5389c-b542-4620-ac99-0ecfb0ae7720","Type":"ContainerStarted","Data":"e1666d73ac7c826ba5868c698b7fcf544589ae13eedee580e93e939c70c37b53"}
Feb 03 07:23:42 crc kubenswrapper[4708]: I0203 07:23:42.801694 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-xs54b"
Feb 03 07:23:42 crc kubenswrapper[4708]: I0203 07:23:42.865543 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-xs54b" podStartSLOduration=1.7820256799999998 podStartE2EDuration="8.865508895s" podCreationTimestamp="2026-02-03 07:23:34 +0000 UTC" firstStartedPulling="2026-02-03 07:23:35.315568233 +0000 UTC m=+794.297515080" lastFinishedPulling="2026-02-03 07:23:42.399051488 +0000 UTC m=+801.380998295" observedRunningTime="2026-02-03 07:23:42.858231711 +0000 UTC m=+801.840178548" watchObservedRunningTime="2026-02-03 07:23:42.865508895 +0000 UTC m=+801.847455702"
Feb 03 07:23:43 crc kubenswrapper[4708]: I0203 07:23:43.818415 4708 generic.go:334] "Generic (PLEG): container finished" podID="a0d5231d-187e-49dd-8f33-12613dd34c3b" containerID="5b2bd7008f3fd0df5a4470d977abaa7037aed78d1d24f95f677bcf5f08ceb80e" exitCode=0
Feb 03 07:23:43 crc kubenswrapper[4708]: I0203 07:23:43.818547 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-cd7sq" event={"ID":"a0d5231d-187e-49dd-8f33-12613dd34c3b","Type":"ContainerDied","Data":"5b2bd7008f3fd0df5a4470d977abaa7037aed78d1d24f95f677bcf5f08ceb80e"}
Feb 03 07:23:44 crc kubenswrapper[4708]: I0203 07:23:44.825930 4708 generic.go:334] "Generic (PLEG): container finished" podID="a0d5231d-187e-49dd-8f33-12613dd34c3b" containerID="9e7bf65a98b2096a8afeb6f7744eb8c925b3c79032604054b5e09116216ab141" exitCode=0
Feb 03 07:23:44 crc kubenswrapper[4708]: I0203 07:23:44.826010 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-cd7sq" event={"ID":"a0d5231d-187e-49dd-8f33-12613dd34c3b","Type":"ContainerDied","Data":"9e7bf65a98b2096a8afeb6f7744eb8c925b3c79032604054b5e09116216ab141"}
Feb 03 07:23:45 crc kubenswrapper[4708]: I0203 07:23:45.838183 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-cd7sq" event={"ID":"a0d5231d-187e-49dd-8f33-12613dd34c3b","Type":"ContainerStarted","Data":"6f12056c14680d56d4e016562ccc091937367cf91e180cad1fb6edd1beadeb68"}
Feb 03 07:23:45 crc kubenswrapper[4708]: I0203 07:23:45.838573 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-cd7sq"
Feb 03 07:23:45 crc kubenswrapper[4708]: I0203 07:23:45.838597 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-cd7sq" event={"ID":"a0d5231d-187e-49dd-8f33-12613dd34c3b","Type":"ContainerStarted","Data":"9ba411a712c4b01a9dbd37ff27033f8662cdb822b40123b2bcc1266a8d23f834"}
Feb 03 07:23:45 crc kubenswrapper[4708]: I0203 07:23:45.838611 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-cd7sq" event={"ID":"a0d5231d-187e-49dd-8f33-12613dd34c3b","Type":"ContainerStarted","Data":"0727090c328efac80ac13bc4c8a9877fe7cccf1d41dced6ccafbeeb0feaeddf5"}
Feb 03 07:23:45 crc kubenswrapper[4708]: I0203 07:23:45.838624 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-cd7sq" event={"ID":"a0d5231d-187e-49dd-8f33-12613dd34c3b","Type":"ContainerStarted","Data":"70fe94582126acf3846a433abc6893f62b6e54dcabd6a957d1f5c4394b2276cb"}
Feb 03 07:23:45 crc kubenswrapper[4708]: I0203 07:23:45.838634 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-cd7sq" event={"ID":"a0d5231d-187e-49dd-8f33-12613dd34c3b","Type":"ContainerStarted","Data":"e4be251e8ba7e56ec8d6996a1af7b997077b2f0839a339e9e784ca71bba724ae"}
Feb 03 07:23:45 crc kubenswrapper[4708]: I0203 07:23:45.838644 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-cd7sq" event={"ID":"a0d5231d-187e-49dd-8f33-12613dd34c3b","Type":"ContainerStarted","Data":"dd75de36f9984f406f273e315a3a0a7673eab5f2723acb902007778e665f77c1"}
Feb 03 07:23:45 crc kubenswrapper[4708]: I0203 07:23:45.868489 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-cd7sq" podStartSLOduration=4.544706042 podStartE2EDuration="11.868469685s" podCreationTimestamp="2026-02-03 07:23:34 +0000 UTC" firstStartedPulling="2026-02-03 07:23:35.09347235 +0000 UTC m=+794.075419167" lastFinishedPulling="2026-02-03 07:23:42.417236003 +0000 UTC m=+801.399182810" observedRunningTime="2026-02-03 07:23:45.861595851 +0000 UTC m=+804.843542658" watchObservedRunningTime="2026-02-03 07:23:45.868469685 +0000 UTC m=+804.850416492"
Feb 03 07:23:46 crc kubenswrapper[4708]: I0203 07:23:46.483465 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-hcz88"
Feb 03 07:23:49 crc kubenswrapper[4708]: I0203 07:23:49.528188 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-wnv7q"]
Feb 03 07:23:49 crc kubenswrapper[4708]: I0203 07:23:49.530392 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-wnv7q"
Feb 03 07:23:49 crc kubenswrapper[4708]: I0203 07:23:49.533389 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-md4gd"
Feb 03 07:23:49 crc kubenswrapper[4708]: I0203 07:23:49.533849 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt"
Feb 03 07:23:49 crc kubenswrapper[4708]: I0203 07:23:49.535508 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt"
Feb 03 07:23:49 crc kubenswrapper[4708]: I0203 07:23:49.562987 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-wnv7q"]
Feb 03 07:23:49 crc kubenswrapper[4708]: I0203 07:23:49.694257 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nddkx\" (UniqueName: \"kubernetes.io/projected/69f8b7bd-c491-4ea0-8eb6-7e705ac136fd-kube-api-access-nddkx\") pod \"openstack-operator-index-wnv7q\" (UID: \"69f8b7bd-c491-4ea0-8eb6-7e705ac136fd\") " pod="openstack-operators/openstack-operator-index-wnv7q"
Feb 03 07:23:49 crc kubenswrapper[4708]: I0203 07:23:49.795715 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nddkx\" (UniqueName: \"kubernetes.io/projected/69f8b7bd-c491-4ea0-8eb6-7e705ac136fd-kube-api-access-nddkx\") pod \"openstack-operator-index-wnv7q\" (UID: \"69f8b7bd-c491-4ea0-8eb6-7e705ac136fd\") " pod="openstack-operators/openstack-operator-index-wnv7q"
Feb 03 07:23:49 crc kubenswrapper[4708]: I0203 07:23:49.815855 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nddkx\" (UniqueName: \"kubernetes.io/projected/69f8b7bd-c491-4ea0-8eb6-7e705ac136fd-kube-api-access-nddkx\") pod \"openstack-operator-index-wnv7q\" (UID: \"69f8b7bd-c491-4ea0-8eb6-7e705ac136fd\") " pod="openstack-operators/openstack-operator-index-wnv7q"
Feb 03 07:23:49 crc kubenswrapper[4708]: I0203 07:23:49.895874 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-wnv7q"
Feb 03 07:23:49 crc kubenswrapper[4708]: I0203 07:23:49.903781 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-cd7sq"
Feb 03 07:23:49 crc kubenswrapper[4708]: I0203 07:23:49.954448 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-cd7sq"
Feb 03 07:23:50 crc kubenswrapper[4708]: I0203 07:23:50.116511 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-wnv7q"]
Feb 03 07:23:50 crc kubenswrapper[4708]: I0203 07:23:50.869495 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-wnv7q" event={"ID":"69f8b7bd-c491-4ea0-8eb6-7e705ac136fd","Type":"ContainerStarted","Data":"6a3744e6cc724518c9783994fcdb2c3adea265b9429987743a220ae7967e92ad"}
Feb 03 07:23:52 crc kubenswrapper[4708]: I0203 07:23:52.700926 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-wnv7q"]
Feb 03 07:23:52 crc kubenswrapper[4708]: I0203 07:23:52.887538 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-wnv7q" event={"ID":"69f8b7bd-c491-4ea0-8eb6-7e705ac136fd","Type":"ContainerStarted","Data":"63a563c9bd003068950aebb0955d662f9bd952a039692383c4f063b5d15884a6"}
Feb 03 07:23:52 crc kubenswrapper[4708]: I0203 07:23:52.887665 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-wnv7q" podUID="69f8b7bd-c491-4ea0-8eb6-7e705ac136fd" containerName="registry-server" containerID="cri-o://63a563c9bd003068950aebb0955d662f9bd952a039692383c4f063b5d15884a6" gracePeriod=2
Feb 03 07:23:52 crc kubenswrapper[4708]: I0203 07:23:52.905199 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-wnv7q" podStartSLOduration=1.3290396420000001 podStartE2EDuration="3.905182023s" podCreationTimestamp="2026-02-03 07:23:49 +0000 UTC" firstStartedPulling="2026-02-03 07:23:50.128876204 +0000 UTC m=+809.110823011" lastFinishedPulling="2026-02-03 07:23:52.705018585 +0000 UTC m=+811.686965392" observedRunningTime="2026-02-03 07:23:52.899363383 +0000 UTC m=+811.881310200" watchObservedRunningTime="2026-02-03 07:23:52.905182023 +0000 UTC m=+811.887128830"
Feb 03 07:23:53 crc kubenswrapper[4708]: I0203 07:23:53.219010 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-wnv7q"
Feb 03 07:23:53 crc kubenswrapper[4708]: I0203 07:23:53.258524 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nddkx\" (UniqueName: \"kubernetes.io/projected/69f8b7bd-c491-4ea0-8eb6-7e705ac136fd-kube-api-access-nddkx\") pod \"69f8b7bd-c491-4ea0-8eb6-7e705ac136fd\" (UID: \"69f8b7bd-c491-4ea0-8eb6-7e705ac136fd\") "
Feb 03 07:23:53 crc kubenswrapper[4708]: I0203 07:23:53.263757 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69f8b7bd-c491-4ea0-8eb6-7e705ac136fd-kube-api-access-nddkx" (OuterVolumeSpecName: "kube-api-access-nddkx") pod "69f8b7bd-c491-4ea0-8eb6-7e705ac136fd" (UID: "69f8b7bd-c491-4ea0-8eb6-7e705ac136fd"). InnerVolumeSpecName "kube-api-access-nddkx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 07:23:53 crc kubenswrapper[4708]: I0203 07:23:53.309927 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-pdxl8"]
Feb 03 07:23:53 crc kubenswrapper[4708]: E0203 07:23:53.310193 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69f8b7bd-c491-4ea0-8eb6-7e705ac136fd" containerName="registry-server"
Feb 03 07:23:53 crc kubenswrapper[4708]: I0203 07:23:53.310210 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="69f8b7bd-c491-4ea0-8eb6-7e705ac136fd" containerName="registry-server"
Feb 03 07:23:53 crc kubenswrapper[4708]: I0203 07:23:53.310332 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="69f8b7bd-c491-4ea0-8eb6-7e705ac136fd" containerName="registry-server"
Feb 03 07:23:53 crc kubenswrapper[4708]: I0203 07:23:53.310865 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-pdxl8"
Feb 03 07:23:53 crc kubenswrapper[4708]: I0203 07:23:53.314616 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-pdxl8"]
Feb 03 07:23:53 crc kubenswrapper[4708]: I0203 07:23:53.359750 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzbrb\" (UniqueName: \"kubernetes.io/projected/a6e65791-8332-41fb-aac6-d17c3ac9d6f6-kube-api-access-hzbrb\") pod \"openstack-operator-index-pdxl8\" (UID: \"a6e65791-8332-41fb-aac6-d17c3ac9d6f6\") " pod="openstack-operators/openstack-operator-index-pdxl8"
Feb 03 07:23:53 crc kubenswrapper[4708]: I0203 07:23:53.359866 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nddkx\" (UniqueName: \"kubernetes.io/projected/69f8b7bd-c491-4ea0-8eb6-7e705ac136fd-kube-api-access-nddkx\") on node \"crc\" DevicePath \"\""
Feb 03 07:23:53 crc kubenswrapper[4708]: I0203 07:23:53.460556 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzbrb\" (UniqueName: \"kubernetes.io/projected/a6e65791-8332-41fb-aac6-d17c3ac9d6f6-kube-api-access-hzbrb\") pod \"openstack-operator-index-pdxl8\" (UID: \"a6e65791-8332-41fb-aac6-d17c3ac9d6f6\") " pod="openstack-operators/openstack-operator-index-pdxl8"
Feb 03 07:23:53 crc kubenswrapper[4708]: I0203 07:23:53.488143 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzbrb\" (UniqueName: \"kubernetes.io/projected/a6e65791-8332-41fb-aac6-d17c3ac9d6f6-kube-api-access-hzbrb\") pod \"openstack-operator-index-pdxl8\" (UID: \"a6e65791-8332-41fb-aac6-d17c3ac9d6f6\") " pod="openstack-operators/openstack-operator-index-pdxl8"
Feb 03 07:23:53 crc kubenswrapper[4708]: I0203 07:23:53.633342 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-pdxl8"
Feb 03 07:23:53 crc kubenswrapper[4708]: I0203 07:23:53.833442 4708 patch_prober.go:28] interesting pod/machine-config-daemon-r94bn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 03 07:23:53 crc kubenswrapper[4708]: I0203 07:23:53.833835 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 03 07:23:53 crc kubenswrapper[4708]: I0203 07:23:53.856250 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-pdxl8"]
Feb 03 07:23:53 crc kubenswrapper[4708]: I0203 07:23:53.895498 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-pdxl8" event={"ID":"a6e65791-8332-41fb-aac6-d17c3ac9d6f6","Type":"ContainerStarted","Data":"c2e7a2c20f560da7dbae8ff127cbf15bca4f6fac24d4a90fa68f87035f34564a"}
Feb 03 07:23:53 crc kubenswrapper[4708]: I0203 07:23:53.896754 4708 generic.go:334] "Generic (PLEG): container finished" podID="69f8b7bd-c491-4ea0-8eb6-7e705ac136fd" containerID="63a563c9bd003068950aebb0955d662f9bd952a039692383c4f063b5d15884a6" exitCode=0
Feb 03 07:23:53 crc kubenswrapper[4708]: I0203 07:23:53.896783 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-wnv7q" event={"ID":"69f8b7bd-c491-4ea0-8eb6-7e705ac136fd","Type":"ContainerDied","Data":"63a563c9bd003068950aebb0955d662f9bd952a039692383c4f063b5d15884a6"}
Feb 03 07:23:53 crc kubenswrapper[4708]: I0203 07:23:53.896807 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-wnv7q"
Feb 03 07:23:53 crc kubenswrapper[4708]: I0203 07:23:53.896820 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-wnv7q" event={"ID":"69f8b7bd-c491-4ea0-8eb6-7e705ac136fd","Type":"ContainerDied","Data":"6a3744e6cc724518c9783994fcdb2c3adea265b9429987743a220ae7967e92ad"}
Feb 03 07:23:53 crc kubenswrapper[4708]: I0203 07:23:53.896840 4708 scope.go:117] "RemoveContainer" containerID="63a563c9bd003068950aebb0955d662f9bd952a039692383c4f063b5d15884a6"
Feb 03 07:23:53 crc kubenswrapper[4708]: I0203 07:23:53.913487 4708 scope.go:117] "RemoveContainer" containerID="63a563c9bd003068950aebb0955d662f9bd952a039692383c4f063b5d15884a6"
Feb 03 07:23:53 crc kubenswrapper[4708]: E0203 07:23:53.913922 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"63a563c9bd003068950aebb0955d662f9bd952a039692383c4f063b5d15884a6\": container with ID starting with 63a563c9bd003068950aebb0955d662f9bd952a039692383c4f063b5d15884a6 not found: ID does not exist" containerID="63a563c9bd003068950aebb0955d662f9bd952a039692383c4f063b5d15884a6"
Feb 03 07:23:53 crc kubenswrapper[4708]: I0203 07:23:53.913950 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63a563c9bd003068950aebb0955d662f9bd952a039692383c4f063b5d15884a6"} err="failed to get container status \"63a563c9bd003068950aebb0955d662f9bd952a039692383c4f063b5d15884a6\": rpc error: code = NotFound desc = could not find container \"63a563c9bd003068950aebb0955d662f9bd952a039692383c4f063b5d15884a6\": container with ID starting with 63a563c9bd003068950aebb0955d662f9bd952a039692383c4f063b5d15884a6 not found: ID does not exist"
Feb 03 07:23:53 crc kubenswrapper[4708]: I0203 07:23:53.930195 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-wnv7q"]
Feb 03 07:23:53 crc kubenswrapper[4708]: I0203 07:23:53.937114 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-wnv7q"]
Feb 03 07:23:54 crc kubenswrapper[4708]: I0203 07:23:54.105366 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69f8b7bd-c491-4ea0-8eb6-7e705ac136fd" path="/var/lib/kubelet/pods/69f8b7bd-c491-4ea0-8eb6-7e705ac136fd/volumes"
Feb 03 07:23:54 crc kubenswrapper[4708]: I0203 07:23:54.895956 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-xs54b"
Feb 03 07:23:54 crc kubenswrapper[4708]: I0203 07:23:54.906739 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-cd7sq"
Feb 03 07:23:54 crc kubenswrapper[4708]: I0203 07:23:54.906913 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-pdxl8" event={"ID":"a6e65791-8332-41fb-aac6-d17c3ac9d6f6","Type":"ContainerStarted","Data":"4a6090394bfb04e298ec94e94519179178bb4f213a574d37487077623f337c67"}
Feb 03 07:23:54 crc kubenswrapper[4708]: I0203 07:23:54.930745 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-pdxl8" podStartSLOduration=1.792342712 podStartE2EDuration="1.930722872s" podCreationTimestamp="2026-02-03 07:23:53 +0000 UTC" firstStartedPulling="2026-02-03 07:23:53.871826694 +0000 UTC m=+812.853773501" lastFinishedPulling="2026-02-03 07:23:54.010206844 +0000 UTC m=+812.992153661" observedRunningTime="2026-02-03 07:23:54.927059775 +0000 UTC m=+813.909006592" watchObservedRunningTime="2026-02-03 07:23:54.930722872 +0000 UTC m=+813.912669679"
07:23:54.010206844 +0000 UTC m=+812.992153661" observedRunningTime="2026-02-03 07:23:54.927059775 +0000 UTC m=+813.909006592" watchObservedRunningTime="2026-02-03 07:23:54.930722872 +0000 UTC m=+813.912669679" Feb 03 07:23:54 crc kubenswrapper[4708]: I0203 07:23:54.991710 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-6968d8fdc4-cbjxc" Feb 03 07:24:03 crc kubenswrapper[4708]: I0203 07:24:03.633709 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-pdxl8" Feb 03 07:24:03 crc kubenswrapper[4708]: I0203 07:24:03.634449 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-pdxl8" Feb 03 07:24:03 crc kubenswrapper[4708]: I0203 07:24:03.664182 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-pdxl8" Feb 03 07:24:04 crc kubenswrapper[4708]: I0203 07:24:04.012411 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-pdxl8" Feb 03 07:24:09 crc kubenswrapper[4708]: I0203 07:24:09.763587 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg"] Feb 03 07:24:09 crc kubenswrapper[4708]: I0203 07:24:09.765356 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg" Feb 03 07:24:09 crc kubenswrapper[4708]: I0203 07:24:09.768391 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-pp2x4" Feb 03 07:24:09 crc kubenswrapper[4708]: I0203 07:24:09.773107 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg"] Feb 03 07:24:09 crc kubenswrapper[4708]: I0203 07:24:09.788014 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/93b116c1-740f-430c-bb44-20ffc67925f0-bundle\") pod \"63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg\" (UID: \"93b116c1-740f-430c-bb44-20ffc67925f0\") " pod="openstack-operators/63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg" Feb 03 07:24:09 crc kubenswrapper[4708]: I0203 07:24:09.788455 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/93b116c1-740f-430c-bb44-20ffc67925f0-util\") pod \"63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg\" (UID: \"93b116c1-740f-430c-bb44-20ffc67925f0\") " pod="openstack-operators/63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg" Feb 03 07:24:09 crc kubenswrapper[4708]: I0203 07:24:09.788516 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7r5lp\" (UniqueName: \"kubernetes.io/projected/93b116c1-740f-430c-bb44-20ffc67925f0-kube-api-access-7r5lp\") pod \"63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg\" (UID: \"93b116c1-740f-430c-bb44-20ffc67925f0\") " pod="openstack-operators/63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg" Feb 03 07:24:09 crc kubenswrapper[4708]: I0203 07:24:09.890169 4708 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/93b116c1-740f-430c-bb44-20ffc67925f0-bundle\") pod \"63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg\" (UID: \"93b116c1-740f-430c-bb44-20ffc67925f0\") " pod="openstack-operators/63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg" Feb 03 07:24:09 crc kubenswrapper[4708]: I0203 07:24:09.890532 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7r5lp\" (UniqueName: \"kubernetes.io/projected/93b116c1-740f-430c-bb44-20ffc67925f0-kube-api-access-7r5lp\") pod \"63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg\" (UID: \"93b116c1-740f-430c-bb44-20ffc67925f0\") " pod="openstack-operators/63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg" Feb 03 07:24:09 crc kubenswrapper[4708]: I0203 07:24:09.890587 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/93b116c1-740f-430c-bb44-20ffc67925f0-util\") pod \"63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg\" (UID: \"93b116c1-740f-430c-bb44-20ffc67925f0\") " pod="openstack-operators/63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg" Feb 03 07:24:09 crc kubenswrapper[4708]: I0203 07:24:09.890663 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/93b116c1-740f-430c-bb44-20ffc67925f0-bundle\") pod \"63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg\" (UID: \"93b116c1-740f-430c-bb44-20ffc67925f0\") " pod="openstack-operators/63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg" Feb 03 07:24:09 crc kubenswrapper[4708]: I0203 07:24:09.891009 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/93b116c1-740f-430c-bb44-20ffc67925f0-util\") pod \"63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg\" (UID: \"93b116c1-740f-430c-bb44-20ffc67925f0\") " pod="openstack-operators/63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg" Feb 03 07:24:09 crc kubenswrapper[4708]: I0203 07:24:09.921968 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7r5lp\" (UniqueName: \"kubernetes.io/projected/93b116c1-740f-430c-bb44-20ffc67925f0-kube-api-access-7r5lp\") pod \"63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg\" (UID: \"93b116c1-740f-430c-bb44-20ffc67925f0\") " pod="openstack-operators/63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg" Feb 03 07:24:10 crc kubenswrapper[4708]: I0203 07:24:10.090463 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg" Feb 03 07:24:10 crc kubenswrapper[4708]: I0203 07:24:10.286084 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg"] Feb 03 07:24:11 crc kubenswrapper[4708]: I0203 07:24:11.039921 4708 generic.go:334] "Generic (PLEG): container finished" podID="93b116c1-740f-430c-bb44-20ffc67925f0" containerID="faaa4c6e71839628fb0a1842fefb7249b2b54dd7596ca0fb6fa41d8e07f33a34" exitCode=0 Feb 03 07:24:11 crc kubenswrapper[4708]: I0203 07:24:11.039987 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg" event={"ID":"93b116c1-740f-430c-bb44-20ffc67925f0","Type":"ContainerDied","Data":"faaa4c6e71839628fb0a1842fefb7249b2b54dd7596ca0fb6fa41d8e07f33a34"} Feb 03 07:24:11 crc kubenswrapper[4708]: I0203 07:24:11.040027 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg" event={"ID":"93b116c1-740f-430c-bb44-20ffc67925f0","Type":"ContainerStarted","Data":"b7b28094d57b3360e8b2ea45eaaac73ece4585a0b6c740d2eca38f0dc6076100"} Feb 03 07:24:12 crc kubenswrapper[4708]: I0203 07:24:12.049612 4708 generic.go:334] "Generic (PLEG): container finished" podID="93b116c1-740f-430c-bb44-20ffc67925f0" containerID="6a21d005e350f5202ef3f0e7a11bf88a3707401da3db1b5e57ee158a585a6f02" exitCode=0 Feb 03 07:24:12 crc kubenswrapper[4708]: I0203 07:24:12.049710 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg" event={"ID":"93b116c1-740f-430c-bb44-20ffc67925f0","Type":"ContainerDied","Data":"6a21d005e350f5202ef3f0e7a11bf88a3707401da3db1b5e57ee158a585a6f02"} Feb 03 07:24:13 crc kubenswrapper[4708]: I0203 07:24:13.063978 4708 generic.go:334] "Generic (PLEG): container finished" podID="93b116c1-740f-430c-bb44-20ffc67925f0" containerID="89b3ee863366074a015355fc62d82b4f547c66772bb0cdda3755c040f995aade" exitCode=0 Feb 03 07:24:13 crc kubenswrapper[4708]: I0203 07:24:13.064041 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg" event={"ID":"93b116c1-740f-430c-bb44-20ffc67925f0","Type":"ContainerDied","Data":"89b3ee863366074a015355fc62d82b4f547c66772bb0cdda3755c040f995aade"} Feb 03 07:24:14 crc kubenswrapper[4708]: I0203 07:24:14.304964 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg" Feb 03 07:24:14 crc kubenswrapper[4708]: I0203 07:24:14.449221 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/93b116c1-740f-430c-bb44-20ffc67925f0-util\") pod \"93b116c1-740f-430c-bb44-20ffc67925f0\" (UID: \"93b116c1-740f-430c-bb44-20ffc67925f0\") " Feb 03 07:24:14 crc kubenswrapper[4708]: I0203 07:24:14.449425 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/93b116c1-740f-430c-bb44-20ffc67925f0-bundle\") pod \"93b116c1-740f-430c-bb44-20ffc67925f0\" (UID: \"93b116c1-740f-430c-bb44-20ffc67925f0\") " Feb 03 07:24:14 crc kubenswrapper[4708]: I0203 07:24:14.449561 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7r5lp\" (UniqueName: \"kubernetes.io/projected/93b116c1-740f-430c-bb44-20ffc67925f0-kube-api-access-7r5lp\") pod \"93b116c1-740f-430c-bb44-20ffc67925f0\" (UID: \"93b116c1-740f-430c-bb44-20ffc67925f0\") " Feb 03 07:24:14 crc kubenswrapper[4708]: I0203 07:24:14.450288 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/93b116c1-740f-430c-bb44-20ffc67925f0-bundle" (OuterVolumeSpecName: "bundle") pod "93b116c1-740f-430c-bb44-20ffc67925f0" (UID: "93b116c1-740f-430c-bb44-20ffc67925f0"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:24:14 crc kubenswrapper[4708]: I0203 07:24:14.454840 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93b116c1-740f-430c-bb44-20ffc67925f0-kube-api-access-7r5lp" (OuterVolumeSpecName: "kube-api-access-7r5lp") pod "93b116c1-740f-430c-bb44-20ffc67925f0" (UID: "93b116c1-740f-430c-bb44-20ffc67925f0"). InnerVolumeSpecName "kube-api-access-7r5lp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:24:14 crc kubenswrapper[4708]: I0203 07:24:14.479067 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/93b116c1-740f-430c-bb44-20ffc67925f0-util" (OuterVolumeSpecName: "util") pod "93b116c1-740f-430c-bb44-20ffc67925f0" (UID: "93b116c1-740f-430c-bb44-20ffc67925f0"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:24:14 crc kubenswrapper[4708]: I0203 07:24:14.552717 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7r5lp\" (UniqueName: \"kubernetes.io/projected/93b116c1-740f-430c-bb44-20ffc67925f0-kube-api-access-7r5lp\") on node \"crc\" DevicePath \"\"" Feb 03 07:24:14 crc kubenswrapper[4708]: I0203 07:24:14.553084 4708 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/93b116c1-740f-430c-bb44-20ffc67925f0-util\") on node \"crc\" DevicePath \"\"" Feb 03 07:24:14 crc kubenswrapper[4708]: I0203 07:24:14.553105 4708 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/93b116c1-740f-430c-bb44-20ffc67925f0-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:24:15 crc kubenswrapper[4708]: I0203 07:24:15.079761 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg" event={"ID":"93b116c1-740f-430c-bb44-20ffc67925f0","Type":"ContainerDied","Data":"b7b28094d57b3360e8b2ea45eaaac73ece4585a0b6c740d2eca38f0dc6076100"} Feb 03 07:24:15 crc kubenswrapper[4708]: I0203 07:24:15.079815 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b7b28094d57b3360e8b2ea45eaaac73ece4585a0b6c740d2eca38f0dc6076100" Feb 03 07:24:15 crc kubenswrapper[4708]: I0203 07:24:15.079907 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg" Feb 03 07:24:16 crc kubenswrapper[4708]: I0203 07:24:16.594788 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-init-d8cb6cd6c-cknw2"] Feb 03 07:24:16 crc kubenswrapper[4708]: E0203 07:24:16.595055 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93b116c1-740f-430c-bb44-20ffc67925f0" containerName="extract" Feb 03 07:24:16 crc kubenswrapper[4708]: I0203 07:24:16.595067 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="93b116c1-740f-430c-bb44-20ffc67925f0" containerName="extract" Feb 03 07:24:16 crc kubenswrapper[4708]: E0203 07:24:16.595080 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93b116c1-740f-430c-bb44-20ffc67925f0" containerName="util" Feb 03 07:24:16 crc kubenswrapper[4708]: I0203 07:24:16.595086 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="93b116c1-740f-430c-bb44-20ffc67925f0" containerName="util" Feb 03 07:24:16 crc kubenswrapper[4708]: E0203 07:24:16.595095 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93b116c1-740f-430c-bb44-20ffc67925f0" containerName="pull" Feb 03 07:24:16 crc kubenswrapper[4708]: I0203 07:24:16.595101 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="93b116c1-740f-430c-bb44-20ffc67925f0" containerName="pull" Feb 03 07:24:16 crc kubenswrapper[4708]: I0203 07:24:16.595217 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="93b116c1-740f-430c-bb44-20ffc67925f0" containerName="extract" Feb 03 07:24:16 crc kubenswrapper[4708]: I0203 07:24:16.595644 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-d8cb6cd6c-cknw2" Feb 03 07:24:16 crc kubenswrapper[4708]: I0203 07:24:16.606972 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-init-dockercfg-67dmr" Feb 03 07:24:16 crc kubenswrapper[4708]: I0203 07:24:16.625836 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-d8cb6cd6c-cknw2"] Feb 03 07:24:16 crc kubenswrapper[4708]: I0203 07:24:16.678663 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2g2c\" (UniqueName: \"kubernetes.io/projected/bc5c0cc4-1640-4e46-86b7-ed4ce809d4aa-kube-api-access-g2g2c\") pod \"openstack-operator-controller-init-d8cb6cd6c-cknw2\" (UID: \"bc5c0cc4-1640-4e46-86b7-ed4ce809d4aa\") " pod="openstack-operators/openstack-operator-controller-init-d8cb6cd6c-cknw2" Feb 03 07:24:16 crc kubenswrapper[4708]: I0203 07:24:16.780047 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2g2c\" (UniqueName: \"kubernetes.io/projected/bc5c0cc4-1640-4e46-86b7-ed4ce809d4aa-kube-api-access-g2g2c\") pod \"openstack-operator-controller-init-d8cb6cd6c-cknw2\" (UID: \"bc5c0cc4-1640-4e46-86b7-ed4ce809d4aa\") " pod="openstack-operators/openstack-operator-controller-init-d8cb6cd6c-cknw2" Feb 03 07:24:16 crc kubenswrapper[4708]: I0203 07:24:16.798726 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2g2c\" (UniqueName: \"kubernetes.io/projected/bc5c0cc4-1640-4e46-86b7-ed4ce809d4aa-kube-api-access-g2g2c\") pod \"openstack-operator-controller-init-d8cb6cd6c-cknw2\" (UID: \"bc5c0cc4-1640-4e46-86b7-ed4ce809d4aa\") " pod="openstack-operators/openstack-operator-controller-init-d8cb6cd6c-cknw2" Feb 03 07:24:16 crc kubenswrapper[4708]: I0203 07:24:16.913879 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-d8cb6cd6c-cknw2" Feb 03 07:24:17 crc kubenswrapper[4708]: I0203 07:24:17.365385 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-d8cb6cd6c-cknw2"] Feb 03 07:24:18 crc kubenswrapper[4708]: I0203 07:24:18.112229 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-d8cb6cd6c-cknw2" event={"ID":"bc5c0cc4-1640-4e46-86b7-ed4ce809d4aa","Type":"ContainerStarted","Data":"d59c4fbc61fd5c3c94f2b8f803021f692f2b9c47975f7eed8c48ce09e62096c0"} Feb 03 07:24:22 crc kubenswrapper[4708]: I0203 07:24:22.140528 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-d8cb6cd6c-cknw2" event={"ID":"bc5c0cc4-1640-4e46-86b7-ed4ce809d4aa","Type":"ContainerStarted","Data":"fcb62e255d9b8b4230791021af2f637f40e9e424a016a1a3823c6b205b7752cd"} Feb 03 07:24:22 crc kubenswrapper[4708]: I0203 07:24:22.141191 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-init-d8cb6cd6c-cknw2" Feb 03 07:24:22 crc kubenswrapper[4708]: I0203 07:24:22.178959 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-init-d8cb6cd6c-cknw2" podStartSLOduration=2.470458397 podStartE2EDuration="6.178934923s" podCreationTimestamp="2026-02-03 07:24:16 +0000 UTC" firstStartedPulling="2026-02-03 07:24:17.373682553 +0000 UTC m=+836.355629400" lastFinishedPulling="2026-02-03 07:24:21.082159119 +0000 UTC m=+840.064105926" observedRunningTime="2026-02-03 07:24:22.163613966 +0000 UTC m=+841.145560773" watchObservedRunningTime="2026-02-03 07:24:22.178934923 +0000 UTC m=+841.160881740" Feb 03 07:24:23 crc kubenswrapper[4708]: I0203 07:24:23.832888 4708 patch_prober.go:28] interesting pod/machine-config-daemon-r94bn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:24:23 crc kubenswrapper[4708]: I0203 07:24:23.833282 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:24:23 crc kubenswrapper[4708]: I0203 07:24:23.833348 4708 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" Feb 03 07:24:23 crc kubenswrapper[4708]: I0203 07:24:23.834274 4708 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"85eaca543a1965c16dfd9764a0051f13446290670638b0dde7e65e129f02d68c"} pod="openshift-machine-config-operator/machine-config-daemon-r94bn" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 03 07:24:23 crc kubenswrapper[4708]: I0203 07:24:23.834408 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" 
containerID="cri-o://85eaca543a1965c16dfd9764a0051f13446290670638b0dde7e65e129f02d68c" gracePeriod=600 Feb 03 07:24:24 crc kubenswrapper[4708]: I0203 07:24:24.153528 4708 generic.go:334] "Generic (PLEG): container finished" podID="67498414-5132-496e-9638-189f5941ace0" containerID="85eaca543a1965c16dfd9764a0051f13446290670638b0dde7e65e129f02d68c" exitCode=0 Feb 03 07:24:24 crc kubenswrapper[4708]: I0203 07:24:24.153571 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" event={"ID":"67498414-5132-496e-9638-189f5941ace0","Type":"ContainerDied","Data":"85eaca543a1965c16dfd9764a0051f13446290670638b0dde7e65e129f02d68c"} Feb 03 07:24:24 crc kubenswrapper[4708]: I0203 07:24:24.153605 4708 scope.go:117] "RemoveContainer" containerID="b515af69685be553960030cd44dc51009b8fa698b76226a5a9defa9717522d12" Feb 03 07:24:25 crc kubenswrapper[4708]: I0203 07:24:25.165437 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" event={"ID":"67498414-5132-496e-9638-189f5941ace0","Type":"ContainerStarted","Data":"1f83852e58fe0e7c3b1e3ce74595be18ac409f305b8edecdccc7efc4c0f59a4b"} Feb 03 07:24:26 crc kubenswrapper[4708]: I0203 07:24:26.917623 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-init-d8cb6cd6c-cknw2" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.395851 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-fc589b45f-mqk95"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.397439 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-fc589b45f-mqk95" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.399680 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-7wg2z" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.407752 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-866f9bb544-m4775"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.408731 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-866f9bb544-m4775" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.410319 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-zlrt7" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.413698 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-8f4c5cb64-d2ddp"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.414662 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-8f4c5cb64-d2ddp" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.418556 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-q98vn" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.419727 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-866f9bb544-m4775"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.424732 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-fc589b45f-mqk95"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.440983 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-8f4c5cb64-d2ddp"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.461664 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-5d77f4dbc9-7vww8"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.462853 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-5d77f4dbc9-7vww8" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.470732 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-v7jrm" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.471439 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4gst\" (UniqueName: \"kubernetes.io/projected/bdd317ff-3849-4a28-9640-dd4611b86599-kube-api-access-w4gst\") pod \"designate-operator-controller-manager-8f4c5cb64-d2ddp\" (UID: \"bdd317ff-3849-4a28-9640-dd4611b86599\") " pod="openstack-operators/designate-operator-controller-manager-8f4c5cb64-d2ddp" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.471695 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bcc55\" (UniqueName: \"kubernetes.io/projected/f1edcba9-46e3-49fd-bb48-ba29b86c7bac-kube-api-access-bcc55\") pod \"cinder-operator-controller-manager-866f9bb544-m4775\" (UID: \"f1edcba9-46e3-49fd-bb48-ba29b86c7bac\") " pod="openstack-operators/cinder-operator-controller-manager-866f9bb544-m4775" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.471898 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xjjr\" (UniqueName: \"kubernetes.io/projected/fe83b4e1-7562-495b-99bc-aa5d1202881c-kube-api-access-5xjjr\") pod \"barbican-operator-controller-manager-fc589b45f-mqk95\" (UID: \"fe83b4e1-7562-495b-99bc-aa5d1202881c\") " pod="openstack-operators/barbican-operator-controller-manager-fc589b45f-mqk95" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.484315 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-5d77f4dbc9-7vww8"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.498561 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-65dc6c8d9c-bgm44"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.499377 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-65dc6c8d9c-bgm44" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.504245 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-blw78" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.516447 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-65dc6c8d9c-bgm44"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.524643 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5fb775575f-gqvwr"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.525389 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-gqvwr" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.541115 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-79955696d6-btqlz"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.541928 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-79955696d6-btqlz" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.542892 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-sm59q" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.544258 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.545447 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-r28p5" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.550216 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5fb775575f-gqvwr"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.564929 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-79955696d6-btqlz"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.575083 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9f166dd2-52e4-473c-9168-c065582fa0e4-cert\") pod \"infra-operator-controller-manager-79955696d6-btqlz\" (UID: \"9f166dd2-52e4-473c-9168-c065582fa0e4\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-btqlz" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.575402 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ddgg7\" (UniqueName: \"kubernetes.io/projected/0b3f5149-6624-450b-b3bd-be0d0ca78c73-kube-api-access-ddgg7\") pod \"glance-operator-controller-manager-5d77f4dbc9-7vww8\" (UID: \"0b3f5149-6624-450b-b3bd-be0d0ca78c73\") " pod="openstack-operators/glance-operator-controller-manager-5d77f4dbc9-7vww8" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.575525 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bcc55\" (UniqueName: \"kubernetes.io/projected/f1edcba9-46e3-49fd-bb48-ba29b86c7bac-kube-api-access-bcc55\") pod 
\"cinder-operator-controller-manager-866f9bb544-m4775\" (UID: \"f1edcba9-46e3-49fd-bb48-ba29b86c7bac\") " pod="openstack-operators/cinder-operator-controller-manager-866f9bb544-m4775" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.575633 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wc2jd\" (UniqueName: \"kubernetes.io/projected/794426b0-cf19-43ff-957e-3413c77f0570-kube-api-access-wc2jd\") pod \"heat-operator-controller-manager-65dc6c8d9c-bgm44\" (UID: \"794426b0-cf19-43ff-957e-3413c77f0570\") " pod="openstack-operators/heat-operator-controller-manager-65dc6c8d9c-bgm44" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.575769 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xjjr\" (UniqueName: \"kubernetes.io/projected/fe83b4e1-7562-495b-99bc-aa5d1202881c-kube-api-access-5xjjr\") pod \"barbican-operator-controller-manager-fc589b45f-mqk95\" (UID: \"fe83b4e1-7562-495b-99bc-aa5d1202881c\") " pod="openstack-operators/barbican-operator-controller-manager-fc589b45f-mqk95" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.575959 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxhdr\" (UniqueName: \"kubernetes.io/projected/35c2f81b-a6df-4f5c-98c9-e9efb7f362b4-kube-api-access-nxhdr\") pod \"horizon-operator-controller-manager-5fb775575f-gqvwr\" (UID: \"35c2f81b-a6df-4f5c-98c9-e9efb7f362b4\") " pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-gqvwr" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.576072 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4gst\" (UniqueName: \"kubernetes.io/projected/bdd317ff-3849-4a28-9640-dd4611b86599-kube-api-access-w4gst\") pod \"designate-operator-controller-manager-8f4c5cb64-d2ddp\" (UID: \"bdd317ff-3849-4a28-9640-dd4611b86599\") " pod="openstack-operators/designate-operator-controller-manager-8f4c5cb64-d2ddp" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.576334 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkf5s\" (UniqueName: \"kubernetes.io/projected/9f166dd2-52e4-473c-9168-c065582fa0e4-kube-api-access-zkf5s\") pod \"infra-operator-controller-manager-79955696d6-btqlz\" (UID: \"9f166dd2-52e4-473c-9168-c065582fa0e4\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-btqlz" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.596406 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5d86df5cd7-2pljs"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.597188 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-5d86df5cd7-2pljs" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.601427 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bcc55\" (UniqueName: \"kubernetes.io/projected/f1edcba9-46e3-49fd-bb48-ba29b86c7bac-kube-api-access-bcc55\") pod \"cinder-operator-controller-manager-866f9bb544-m4775\" (UID: \"f1edcba9-46e3-49fd-bb48-ba29b86c7bac\") " pod="openstack-operators/cinder-operator-controller-manager-866f9bb544-m4775" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.602807 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-hgl2r" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.606283 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4gst\" (UniqueName: \"kubernetes.io/projected/bdd317ff-3849-4a28-9640-dd4611b86599-kube-api-access-w4gst\") pod \"designate-operator-controller-manager-8f4c5cb64-d2ddp\" (UID: \"bdd317ff-3849-4a28-9640-dd4611b86599\") " pod="openstack-operators/designate-operator-controller-manager-8f4c5cb64-d2ddp" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.607067 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xjjr\" (UniqueName: \"kubernetes.io/projected/fe83b4e1-7562-495b-99bc-aa5d1202881c-kube-api-access-5xjjr\") pod \"barbican-operator-controller-manager-fc589b45f-mqk95\" (UID: \"fe83b4e1-7562-495b-99bc-aa5d1202881c\") " pod="openstack-operators/barbican-operator-controller-manager-fc589b45f-mqk95" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.635285 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5d86df5cd7-2pljs"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.653873 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-64469b487f-kccqb"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.655967 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-64469b487f-kccqb" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.683095 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-4cwpt" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.683389 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l82g4\" (UniqueName: \"kubernetes.io/projected/c6a27492-3276-45de-a2d9-1c605152a0b6-kube-api-access-l82g4\") pod \"ironic-operator-controller-manager-5d86df5cd7-2pljs\" (UID: \"c6a27492-3276-45de-a2d9-1c605152a0b6\") " pod="openstack-operators/ironic-operator-controller-manager-5d86df5cd7-2pljs" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.683459 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7hnp\" (UniqueName: \"kubernetes.io/projected/3e86abc8-c97c-4eef-b181-0d87376edd8f-kube-api-access-j7hnp\") pod \"keystone-operator-controller-manager-64469b487f-kccqb\" (UID: \"3e86abc8-c97c-4eef-b181-0d87376edd8f\") " pod="openstack-operators/keystone-operator-controller-manager-64469b487f-kccqb" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.683493 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nxhdr\" (UniqueName: \"kubernetes.io/projected/35c2f81b-a6df-4f5c-98c9-e9efb7f362b4-kube-api-access-nxhdr\") pod \"horizon-operator-controller-manager-5fb775575f-gqvwr\" (UID: \"35c2f81b-a6df-4f5c-98c9-e9efb7f362b4\") " pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-gqvwr" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.683524 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkf5s\" (UniqueName: \"kubernetes.io/projected/9f166dd2-52e4-473c-9168-c065582fa0e4-kube-api-access-zkf5s\") pod \"infra-operator-controller-manager-79955696d6-btqlz\" (UID: \"9f166dd2-52e4-473c-9168-c065582fa0e4\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-btqlz" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.683553 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9f166dd2-52e4-473c-9168-c065582fa0e4-cert\") pod \"infra-operator-controller-manager-79955696d6-btqlz\" (UID: \"9f166dd2-52e4-473c-9168-c065582fa0e4\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-btqlz" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.683599 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ddgg7\" (UniqueName: \"kubernetes.io/projected/0b3f5149-6624-450b-b3bd-be0d0ca78c73-kube-api-access-ddgg7\") pod \"glance-operator-controller-manager-5d77f4dbc9-7vww8\" (UID: \"0b3f5149-6624-450b-b3bd-be0d0ca78c73\") " pod="openstack-operators/glance-operator-controller-manager-5d77f4dbc9-7vww8" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.683640 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wc2jd\" (UniqueName: \"kubernetes.io/projected/794426b0-cf19-43ff-957e-3413c77f0570-kube-api-access-wc2jd\") pod \"heat-operator-controller-manager-65dc6c8d9c-bgm44\" (UID: \"794426b0-cf19-43ff-957e-3413c77f0570\") " pod="openstack-operators/heat-operator-controller-manager-65dc6c8d9c-bgm44" Feb 03 07:24:45 crc 
kubenswrapper[4708]: E0203 07:24:45.684029 4708 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Feb 03 07:24:45 crc kubenswrapper[4708]: E0203 07:24:45.684116 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9f166dd2-52e4-473c-9168-c065582fa0e4-cert podName:9f166dd2-52e4-473c-9168-c065582fa0e4 nodeName:}" failed. No retries permitted until 2026-02-03 07:24:46.184099051 +0000 UTC m=+865.166045868 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9f166dd2-52e4-473c-9168-c065582fa0e4-cert") pod "infra-operator-controller-manager-79955696d6-btqlz" (UID: "9f166dd2-52e4-473c-9168-c065582fa0e4") : secret "infra-operator-webhook-server-cert" not found
Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.704829 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7775d87d9d-gpdwp"]
Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.705837 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7775d87d9d-gpdwp"
Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.710122 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-z68hw"
Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.716042 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkf5s\" (UniqueName: \"kubernetes.io/projected/9f166dd2-52e4-473c-9168-c065582fa0e4-kube-api-access-zkf5s\") pod \"infra-operator-controller-manager-79955696d6-btqlz\" (UID: \"9f166dd2-52e4-473c-9168-c065582fa0e4\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-btqlz"
Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.718399 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-fc589b45f-mqk95"
Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.720814 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ddgg7\" (UniqueName: \"kubernetes.io/projected/0b3f5149-6624-450b-b3bd-be0d0ca78c73-kube-api-access-ddgg7\") pod \"glance-operator-controller-manager-5d77f4dbc9-7vww8\" (UID: \"0b3f5149-6624-450b-b3bd-be0d0ca78c73\") " pod="openstack-operators/glance-operator-controller-manager-5d77f4dbc9-7vww8"
Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.725621 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nxhdr\" (UniqueName: \"kubernetes.io/projected/35c2f81b-a6df-4f5c-98c9-e9efb7f362b4-kube-api-access-nxhdr\") pod \"horizon-operator-controller-manager-5fb775575f-gqvwr\" (UID: \"35c2f81b-a6df-4f5c-98c9-e9efb7f362b4\") " pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-gqvwr"
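The two E-level records above show why the infra-operator pod cannot start yet: its webhook cert volume references a secret that does not exist, so MountVolume.SetUp fails and the volume manager schedules a retry, refusing new attempts until the logged deadline (durationBeforeRetry 500ms). On repeated failures this delay grows exponentially. A minimal sketch of that retry bookkeeping (the 500ms initial delay matches the log; the doubling factor and the cap are assumptions for illustration, not the volume manager's actual code):

package main

import (
	"fmt"
	"time"
)

// backoff mirrors the "No retries permitted until ..." bookkeeping seen above.
type backoff struct {
	delay time.Duration
	next  time.Time
}

func (b *backoff) failed(now time.Time) {
	const initial = 500 * time.Millisecond // matches durationBeforeRetry in the log
	const maxDelay = 2 * time.Minute       // assumed cap for illustration
	if b.delay == 0 {
		b.delay = initial
	} else {
		b.delay *= 2
		if b.delay > maxDelay {
			b.delay = maxDelay
		}
	}
	b.next = now.Add(b.delay)
}

func main() {
	var b backoff
	now := time.Now()
	for attempt := 1; attempt <= 4; attempt++ {
		b.failed(now)
		fmt.Printf("attempt %d failed; no retries permitted until %s (durationBeforeRetry %s)\n",
			attempt, b.next.Format(time.RFC3339Nano), b.delay)
		now = b.next
	}
}

Once the secret is created, the next scheduled retry succeeds and the pod proceeds, which is consistent with the mount eventually succeeding later in this log.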
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-866f9bb544-m4775" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.736618 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wc2jd\" (UniqueName: \"kubernetes.io/projected/794426b0-cf19-43ff-957e-3413c77f0570-kube-api-access-wc2jd\") pod \"heat-operator-controller-manager-65dc6c8d9c-bgm44\" (UID: \"794426b0-cf19-43ff-957e-3413c77f0570\") " pod="openstack-operators/heat-operator-controller-manager-65dc6c8d9c-bgm44" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.741721 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-64469b487f-kccqb"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.750010 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-8f4c5cb64-d2ddp" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.752187 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf948998-gl2lx"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.753243 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-gl2lx" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.762824 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-prf9m" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.776196 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-576995988b-tdxnl"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.777042 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-576995988b-tdxnl" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.781519 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-jmqkz" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.786090 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-5d77f4dbc9-7vww8" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.786635 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf948998-gl2lx"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.787086 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-476cn\" (UniqueName: \"kubernetes.io/projected/d6260b8b-c5f5-4803-8305-0b14903926c9-kube-api-access-476cn\") pod \"manila-operator-controller-manager-7775d87d9d-gpdwp\" (UID: \"d6260b8b-c5f5-4803-8305-0b14903926c9\") " pod="openstack-operators/manila-operator-controller-manager-7775d87d9d-gpdwp" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.787119 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7hnp\" (UniqueName: \"kubernetes.io/projected/3e86abc8-c97c-4eef-b181-0d87376edd8f-kube-api-access-j7hnp\") pod \"keystone-operator-controller-manager-64469b487f-kccqb\" (UID: \"3e86abc8-c97c-4eef-b181-0d87376edd8f\") " pod="openstack-operators/keystone-operator-controller-manager-64469b487f-kccqb" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.787181 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98t9c\" (UniqueName: \"kubernetes.io/projected/3b0ccfa3-4ef3-4e3c-9127-59e1abc6631d-kube-api-access-98t9c\") pod \"mariadb-operator-controller-manager-67bf948998-gl2lx\" (UID: \"3b0ccfa3-4ef3-4e3c-9127-59e1abc6631d\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-gl2lx" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.787225 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l82g4\" (UniqueName: \"kubernetes.io/projected/c6a27492-3276-45de-a2d9-1c605152a0b6-kube-api-access-l82g4\") pod \"ironic-operator-controller-manager-5d86df5cd7-2pljs\" (UID: \"c6a27492-3276-45de-a2d9-1c605152a0b6\") " pod="openstack-operators/ironic-operator-controller-manager-5d86df5cd7-2pljs" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.794754 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7775d87d9d-gpdwp"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.801168 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-576995988b-tdxnl"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.807889 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7b89ddb58-x6prm"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.808934 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-7b89ddb58-x6prm" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.810112 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7hnp\" (UniqueName: \"kubernetes.io/projected/3e86abc8-c97c-4eef-b181-0d87376edd8f-kube-api-access-j7hnp\") pod \"keystone-operator-controller-manager-64469b487f-kccqb\" (UID: \"3e86abc8-c97c-4eef-b181-0d87376edd8f\") " pod="openstack-operators/keystone-operator-controller-manager-64469b487f-kccqb" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.810339 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-x9jtm" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.816844 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-5644b66645-m7mbt"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.817594 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-5644b66645-m7mbt" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.817940 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l82g4\" (UniqueName: \"kubernetes.io/projected/c6a27492-3276-45de-a2d9-1c605152a0b6-kube-api-access-l82g4\") pod \"ironic-operator-controller-manager-5d86df5cd7-2pljs\" (UID: \"c6a27492-3276-45de-a2d9-1c605152a0b6\") " pod="openstack-operators/ironic-operator-controller-manager-5d86df5cd7-2pljs" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.821276 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-x6t72" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.821433 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7b89ddb58-x6prm"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.825395 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-65dc6c8d9c-bgm44" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.826185 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.830016 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.832898 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.833098 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-vqcxf" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.836399 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-788c46999f-mlxcw"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.837211 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-mlxcw" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.840128 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-z76q4" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.842547 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-5644b66645-m7mbt"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.842735 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-gqvwr" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.848672 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-788c46999f-mlxcw"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.864955 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-5b964cf4cd-k5zbb"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.876628 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.876731 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-k5zbb" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.880016 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-7b89fdf75b-94vx4"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.885049 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-7lc9h" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.885518 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-7b89fdf75b-94vx4" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.888186 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdsww\" (UniqueName: \"kubernetes.io/projected/f6f38306-d4b2-46fa-9c49-8ac276362db8-kube-api-access-sdsww\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62\" (UID: \"f6f38306-d4b2-46fa-9c49-8ac276362db8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.888239 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-476cn\" (UniqueName: \"kubernetes.io/projected/d6260b8b-c5f5-4803-8305-0b14903926c9-kube-api-access-476cn\") pod \"manila-operator-controller-manager-7775d87d9d-gpdwp\" (UID: \"d6260b8b-c5f5-4803-8305-0b14903926c9\") " pod="openstack-operators/manila-operator-controller-manager-7775d87d9d-gpdwp" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.888301 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rszch\" (UniqueName: \"kubernetes.io/projected/240226bb-f320-4bd5-87ad-1d219c9e61e7-kube-api-access-rszch\") pod \"octavia-operator-controller-manager-7b89ddb58-x6prm\" (UID: \"240226bb-f320-4bd5-87ad-1d219c9e61e7\") " pod="openstack-operators/octavia-operator-controller-manager-7b89ddb58-x6prm" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.888334 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jfl8\" (UniqueName: \"kubernetes.io/projected/802333ba-2384-4688-b939-28cbfda8bfc1-kube-api-access-4jfl8\") pod \"nova-operator-controller-manager-5644b66645-m7mbt\" (UID: \"802333ba-2384-4688-b939-28cbfda8bfc1\") " pod="openstack-operators/nova-operator-controller-manager-5644b66645-m7mbt" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.888390 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98t9c\" (UniqueName: \"kubernetes.io/projected/3b0ccfa3-4ef3-4e3c-9127-59e1abc6631d-kube-api-access-98t9c\") pod \"mariadb-operator-controller-manager-67bf948998-gl2lx\" (UID: \"3b0ccfa3-4ef3-4e3c-9127-59e1abc6631d\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-gl2lx" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.888421 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwjxl\" (UniqueName: \"kubernetes.io/projected/6cee24b4-302f-48db-badb-39bcab5756d9-kube-api-access-jwjxl\") pod \"ovn-operator-controller-manager-788c46999f-mlxcw\" (UID: \"6cee24b4-302f-48db-badb-39bcab5756d9\") " pod="openstack-operators/ovn-operator-controller-manager-788c46999f-mlxcw" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.888457 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-462sg\" (UniqueName: \"kubernetes.io/projected/7d3cec4a-da6f-431a-98d7-c4784bb248bc-kube-api-access-462sg\") pod \"neutron-operator-controller-manager-576995988b-tdxnl\" (UID: \"7d3cec4a-da6f-431a-98d7-c4784bb248bc\") " pod="openstack-operators/neutron-operator-controller-manager-576995988b-tdxnl" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.888497 4708 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f6f38306-d4b2-46fa-9c49-8ac276362db8-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62\" (UID: \"f6f38306-d4b2-46fa-9c49-8ac276362db8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.893971 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-nkkp2" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.917417 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-7b89fdf75b-94vx4"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.922585 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98t9c\" (UniqueName: \"kubernetes.io/projected/3b0ccfa3-4ef3-4e3c-9127-59e1abc6631d-kube-api-access-98t9c\") pod \"mariadb-operator-controller-manager-67bf948998-gl2lx\" (UID: \"3b0ccfa3-4ef3-4e3c-9127-59e1abc6631d\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-gl2lx" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.924084 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5b964cf4cd-k5zbb"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.930341 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-476cn\" (UniqueName: \"kubernetes.io/projected/d6260b8b-c5f5-4803-8305-0b14903926c9-kube-api-access-476cn\") pod \"manila-operator-controller-manager-7775d87d9d-gpdwp\" (UID: \"d6260b8b-c5f5-4803-8305-0b14903926c9\") " pod="openstack-operators/manila-operator-controller-manager-7775d87d9d-gpdwp" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.937117 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-565849b54-rwgmc"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.937895 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-565849b54-rwgmc" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.941256 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-vsb57" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.970723 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-565849b54-rwgmc"] Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.973133 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-5d86df5cd7-2pljs" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.990976 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwjxl\" (UniqueName: \"kubernetes.io/projected/6cee24b4-302f-48db-badb-39bcab5756d9-kube-api-access-jwjxl\") pod \"ovn-operator-controller-manager-788c46999f-mlxcw\" (UID: \"6cee24b4-302f-48db-badb-39bcab5756d9\") " pod="openstack-operators/ovn-operator-controller-manager-788c46999f-mlxcw" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.991032 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-stgp6\" (UniqueName: \"kubernetes.io/projected/faade3fc-fd45-4bcf-8aa5-0b0a3765581f-kube-api-access-stgp6\") pod \"telemetry-operator-controller-manager-565849b54-rwgmc\" (UID: \"faade3fc-fd45-4bcf-8aa5-0b0a3765581f\") " pod="openstack-operators/telemetry-operator-controller-manager-565849b54-rwgmc" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.991058 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-462sg\" (UniqueName: \"kubernetes.io/projected/7d3cec4a-da6f-431a-98d7-c4784bb248bc-kube-api-access-462sg\") pod \"neutron-operator-controller-manager-576995988b-tdxnl\" (UID: \"7d3cec4a-da6f-431a-98d7-c4784bb248bc\") " pod="openstack-operators/neutron-operator-controller-manager-576995988b-tdxnl" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.991077 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f6f38306-d4b2-46fa-9c49-8ac276362db8-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62\" (UID: \"f6f38306-d4b2-46fa-9c49-8ac276362db8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.991112 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdsww\" (UniqueName: \"kubernetes.io/projected/f6f38306-d4b2-46fa-9c49-8ac276362db8-kube-api-access-sdsww\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62\" (UID: \"f6f38306-d4b2-46fa-9c49-8ac276362db8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.991134 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vs7ds\" (UniqueName: \"kubernetes.io/projected/ac5a5419-6887-45ea-944d-1c8f51816492-kube-api-access-vs7ds\") pod \"swift-operator-controller-manager-7b89fdf75b-94vx4\" (UID: \"ac5a5419-6887-45ea-944d-1c8f51816492\") " pod="openstack-operators/swift-operator-controller-manager-7b89fdf75b-94vx4" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.991172 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rszch\" (UniqueName: \"kubernetes.io/projected/240226bb-f320-4bd5-87ad-1d219c9e61e7-kube-api-access-rszch\") pod \"octavia-operator-controller-manager-7b89ddb58-x6prm\" (UID: \"240226bb-f320-4bd5-87ad-1d219c9e61e7\") " pod="openstack-operators/octavia-operator-controller-manager-7b89ddb58-x6prm" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.991193 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jfl8\" (UniqueName: 
\"kubernetes.io/projected/802333ba-2384-4688-b939-28cbfda8bfc1-kube-api-access-4jfl8\") pod \"nova-operator-controller-manager-5644b66645-m7mbt\" (UID: \"802333ba-2384-4688-b939-28cbfda8bfc1\") " pod="openstack-operators/nova-operator-controller-manager-5644b66645-m7mbt" Feb 03 07:24:45 crc kubenswrapper[4708]: I0203 07:24:45.991225 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jnx9x\" (UniqueName: \"kubernetes.io/projected/7d53946e-45e4-4abe-b4e7-d64339fdedd3-kube-api-access-jnx9x\") pod \"placement-operator-controller-manager-5b964cf4cd-k5zbb\" (UID: \"7d53946e-45e4-4abe-b4e7-d64339fdedd3\") " pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-k5zbb" Feb 03 07:24:45 crc kubenswrapper[4708]: E0203 07:24:45.991652 4708 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 03 07:24:45 crc kubenswrapper[4708]: E0203 07:24:45.991689 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f6f38306-d4b2-46fa-9c49-8ac276362db8-cert podName:f6f38306-d4b2-46fa-9c49-8ac276362db8 nodeName:}" failed. No retries permitted until 2026-02-03 07:24:46.491674837 +0000 UTC m=+865.473621644 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f6f38306-d4b2-46fa-9c49-8ac276362db8-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62" (UID: "f6f38306-d4b2-46fa-9c49-8ac276362db8") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.008029 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-64469b487f-kccqb" Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.017972 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-56f8bfcd9f-rxvkg"] Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.018910 4708 util.go:30] "No sandbox for pod can be found. 
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.022300 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jfl8\" (UniqueName: \"kubernetes.io/projected/802333ba-2384-4688-b939-28cbfda8bfc1-kube-api-access-4jfl8\") pod \"nova-operator-controller-manager-5644b66645-m7mbt\" (UID: \"802333ba-2384-4688-b939-28cbfda8bfc1\") " pod="openstack-operators/nova-operator-controller-manager-5644b66645-m7mbt"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.025169 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-ns79r"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.039452 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwjxl\" (UniqueName: \"kubernetes.io/projected/6cee24b4-302f-48db-badb-39bcab5756d9-kube-api-access-jwjxl\") pod \"ovn-operator-controller-manager-788c46999f-mlxcw\" (UID: \"6cee24b4-302f-48db-badb-39bcab5756d9\") " pod="openstack-operators/ovn-operator-controller-manager-788c46999f-mlxcw"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.040190 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-462sg\" (UniqueName: \"kubernetes.io/projected/7d3cec4a-da6f-431a-98d7-c4784bb248bc-kube-api-access-462sg\") pod \"neutron-operator-controller-manager-576995988b-tdxnl\" (UID: \"7d3cec4a-da6f-431a-98d7-c4784bb248bc\") " pod="openstack-operators/neutron-operator-controller-manager-576995988b-tdxnl"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.043406 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rszch\" (UniqueName: \"kubernetes.io/projected/240226bb-f320-4bd5-87ad-1d219c9e61e7-kube-api-access-rszch\") pod \"octavia-operator-controller-manager-7b89ddb58-x6prm\" (UID: \"240226bb-f320-4bd5-87ad-1d219c9e61e7\") " pod="openstack-operators/octavia-operator-controller-manager-7b89ddb58-x6prm"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.053297 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdsww\" (UniqueName: \"kubernetes.io/projected/f6f38306-d4b2-46fa-9c49-8ac276362db8-kube-api-access-sdsww\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62\" (UID: \"f6f38306-d4b2-46fa-9c49-8ac276362db8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.066634 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-56f8bfcd9f-rxvkg"]
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.108823 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vs7ds\" (UniqueName: \"kubernetes.io/projected/ac5a5419-6887-45ea-944d-1c8f51816492-kube-api-access-vs7ds\") pod \"swift-operator-controller-manager-7b89fdf75b-94vx4\" (UID: \"ac5a5419-6887-45ea-944d-1c8f51816492\") " pod="openstack-operators/swift-operator-controller-manager-7b89fdf75b-94vx4"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.108902 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqxh7\" (UniqueName: \"kubernetes.io/projected/1d0931b6-6d69-4702-9b8c-93f1a6600bbe-kube-api-access-vqxh7\") pod \"test-operator-controller-manager-56f8bfcd9f-rxvkg\" (UID: \"1d0931b6-6d69-4702-9b8c-93f1a6600bbe\") " pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-rxvkg"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.108981 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jnx9x\" (UniqueName: \"kubernetes.io/projected/7d53946e-45e4-4abe-b4e7-d64339fdedd3-kube-api-access-jnx9x\") pod \"placement-operator-controller-manager-5b964cf4cd-k5zbb\" (UID: \"7d53946e-45e4-4abe-b4e7-d64339fdedd3\") " pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-k5zbb"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.109037 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-stgp6\" (UniqueName: \"kubernetes.io/projected/faade3fc-fd45-4bcf-8aa5-0b0a3765581f-kube-api-access-stgp6\") pod \"telemetry-operator-controller-manager-565849b54-rwgmc\" (UID: \"faade3fc-fd45-4bcf-8aa5-0b0a3765581f\") " pod="openstack-operators/telemetry-operator-controller-manager-565849b54-rwgmc"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.120274 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7775d87d9d-gpdwp"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.137976 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-gl2lx"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.145126 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-stgp6\" (UniqueName: \"kubernetes.io/projected/faade3fc-fd45-4bcf-8aa5-0b0a3765581f-kube-api-access-stgp6\") pod \"telemetry-operator-controller-manager-565849b54-rwgmc\" (UID: \"faade3fc-fd45-4bcf-8aa5-0b0a3765581f\") " pod="openstack-operators/telemetry-operator-controller-manager-565849b54-rwgmc"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.149484 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jnx9x\" (UniqueName: \"kubernetes.io/projected/7d53946e-45e4-4abe-b4e7-d64339fdedd3-kube-api-access-jnx9x\") pod \"placement-operator-controller-manager-5b964cf4cd-k5zbb\" (UID: \"7d53946e-45e4-4abe-b4e7-d64339fdedd3\") " pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-k5zbb"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.152102 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-576995988b-tdxnl"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.166141 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-7b89ddb58-x6prm"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.177638 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vs7ds\" (UniqueName: \"kubernetes.io/projected/ac5a5419-6887-45ea-944d-1c8f51816492-kube-api-access-vs7ds\") pod \"swift-operator-controller-manager-7b89fdf75b-94vx4\" (UID: \"ac5a5419-6887-45ea-944d-1c8f51816492\") " pod="openstack-operators/swift-operator-controller-manager-7b89fdf75b-94vx4"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.198095 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-5644b66645-m7mbt"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.212879 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqxh7\" (UniqueName: \"kubernetes.io/projected/1d0931b6-6d69-4702-9b8c-93f1a6600bbe-kube-api-access-vqxh7\") pod \"test-operator-controller-manager-56f8bfcd9f-rxvkg\" (UID: \"1d0931b6-6d69-4702-9b8c-93f1a6600bbe\") " pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-rxvkg"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.212952 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9f166dd2-52e4-473c-9168-c065582fa0e4-cert\") pod \"infra-operator-controller-manager-79955696d6-btqlz\" (UID: \"9f166dd2-52e4-473c-9168-c065582fa0e4\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-btqlz"
Feb 03 07:24:46 crc kubenswrapper[4708]: E0203 07:24:46.214309 4708 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Feb 03 07:24:46 crc kubenswrapper[4708]: E0203 07:24:46.215679 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9f166dd2-52e4-473c-9168-c065582fa0e4-cert podName:9f166dd2-52e4-473c-9168-c065582fa0e4 nodeName:}" failed. No retries permitted until 2026-02-03 07:24:47.214342171 +0000 UTC m=+866.196288978 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9f166dd2-52e4-473c-9168-c065582fa0e4-cert") pod "infra-operator-controller-manager-79955696d6-btqlz" (UID: "9f166dd2-52e4-473c-9168-c065582fa0e4") : secret "infra-operator-webhook-server-cert" not found
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.225812 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-586b95b788-gldzv"]
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.227095 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-586b95b788-gldzv"]
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.227129 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd"]
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.229020 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd"]
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.229119 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.229522 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-586b95b788-gldzv"
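The reconciler_common.go lines interleaved through this section come from the volume manager's reconciler, which repeatedly diffs the desired state of the world (every volume each scheduled pod declares) against the actual state (what is attached and mounted) and starts a VerifyControllerAttachedVolume or MountVolume operation for each gap. A conceptual Go sketch of that diff-and-act loop, with illustrative types rather than kubelet's real ones:

    package main

    import "fmt"

    // volumeState maps a volume UniqueName to whether it is mounted.
    // Purely illustrative; kubelet keeps far richer state.
    type volumeState map[string]bool

    // reconcile starts a mount for every desired volume not yet in the
    // actual state, mirroring "operationExecutor.MountVolume started ...".
    func reconcile(desired, actual volumeState, mount func(string)) {
    	for name := range desired {
    		if !actual[name] {
    			mount(name)
    		}
    	}
    }

    func main() {
    	desired := volumeState{"kubernetes.io/secret/f6f38306-...-cert": true}
    	actual := volumeState{}
    	reconcile(desired, actual, func(n string) {
    		fmt.Println("MountVolume started for", n)
    	})
    }

Because the loop re-runs on every sync period, a mount that fails now (as the cert mounts below do) is simply attempted again once its backoff window expires.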
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.231927 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.232114 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-c8vqz"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.231927 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.232074 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-kqgrm"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.251028 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-mtgqj"]
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.252282 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-mtgqj"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.252633 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-mlxcw"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.254102 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqxh7\" (UniqueName: \"kubernetes.io/projected/1d0931b6-6d69-4702-9b8c-93f1a6600bbe-kube-api-access-vqxh7\") pod \"test-operator-controller-manager-56f8bfcd9f-rxvkg\" (UID: \"1d0931b6-6d69-4702-9b8c-93f1a6600bbe\") " pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-rxvkg"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.263072 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-qlgmg"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.265724 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-mtgqj"]
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.276210 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-k5zbb"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.303623 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-7b89fdf75b-94vx4"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.318689 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhwth\" (UniqueName: \"kubernetes.io/projected/5c9c90e2-345b-4a13-9acc-6e4d98113779-kube-api-access-dhwth\") pod \"watcher-operator-controller-manager-586b95b788-gldzv\" (UID: \"5c9c90e2-345b-4a13-9acc-6e4d98113779\") " pod="openstack-operators/watcher-operator-controller-manager-586b95b788-gldzv"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.318728 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6b62\" (UniqueName: \"kubernetes.io/projected/3841da74-e9f4-4f19-ae3c-66e117029c51-kube-api-access-s6b62\") pod \"rabbitmq-cluster-operator-manager-668c99d594-mtgqj\" (UID: \"3841da74-e9f4-4f19-ae3c-66e117029c51\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-mtgqj"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.318773 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-metrics-certs\") pod \"openstack-operator-controller-manager-79f7df8fc4-bhfdd\" (UID: \"b5a7b398-66a9-4c39-a940-631bcc804dfe\") " pod="openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.318880 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97b66\" (UniqueName: \"kubernetes.io/projected/b5a7b398-66a9-4c39-a940-631bcc804dfe-kube-api-access-97b66\") pod \"openstack-operator-controller-manager-79f7df8fc4-bhfdd\" (UID: \"b5a7b398-66a9-4c39-a940-631bcc804dfe\") " pod="openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.318902 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-webhook-certs\") pod \"openstack-operator-controller-manager-79f7df8fc4-bhfdd\" (UID: \"b5a7b398-66a9-4c39-a940-631bcc804dfe\") " pod="openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.366694 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-565849b54-rwgmc"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.376482 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-fc589b45f-mqk95"]
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.381323 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-rxvkg"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.417054 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-866f9bb544-m4775"]
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.422321 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97b66\" (UniqueName: \"kubernetes.io/projected/b5a7b398-66a9-4c39-a940-631bcc804dfe-kube-api-access-97b66\") pod \"openstack-operator-controller-manager-79f7df8fc4-bhfdd\" (UID: \"b5a7b398-66a9-4c39-a940-631bcc804dfe\") " pod="openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.422365 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-webhook-certs\") pod \"openstack-operator-controller-manager-79f7df8fc4-bhfdd\" (UID: \"b5a7b398-66a9-4c39-a940-631bcc804dfe\") " pod="openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.422429 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhwth\" (UniqueName: \"kubernetes.io/projected/5c9c90e2-345b-4a13-9acc-6e4d98113779-kube-api-access-dhwth\") pod \"watcher-operator-controller-manager-586b95b788-gldzv\" (UID: \"5c9c90e2-345b-4a13-9acc-6e4d98113779\") " pod="openstack-operators/watcher-operator-controller-manager-586b95b788-gldzv"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.422446 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6b62\" (UniqueName: \"kubernetes.io/projected/3841da74-e9f4-4f19-ae3c-66e117029c51-kube-api-access-s6b62\") pod \"rabbitmq-cluster-operator-manager-668c99d594-mtgqj\" (UID: \"3841da74-e9f4-4f19-ae3c-66e117029c51\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-mtgqj"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.422479 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-metrics-certs\") pod \"openstack-operator-controller-manager-79f7df8fc4-bhfdd\" (UID: \"b5a7b398-66a9-4c39-a940-631bcc804dfe\") " pod="openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd"
Feb 03 07:24:46 crc kubenswrapper[4708]: E0203 07:24:46.422633 4708 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Feb 03 07:24:46 crc kubenswrapper[4708]: E0203 07:24:46.422689 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-metrics-certs podName:b5a7b398-66a9-4c39-a940-631bcc804dfe nodeName:}" failed. No retries permitted until 2026-02-03 07:24:46.922672127 +0000 UTC m=+865.904618934 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-metrics-certs") pod "openstack-operator-controller-manager-79f7df8fc4-bhfdd" (UID: "b5a7b398-66a9-4c39-a940-631bcc804dfe") : secret "metrics-server-cert" not found
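Each "Couldn't get secret ... not found" error above is a pod consuming a certificate secret that has not been created yet; in this deployment the webhook and metrics cert secrets evidently appear only after the operator pods are scheduled. Because the volume references the secret as required, MountVolume.SetUp keeps failing and the pod sits in ContainerCreating until the secret exists. An illustrative Go definition of a volume with that shape, using the k8s.io/api/core/v1 types; the explicit Optional field (shown with its default, false, as an assumption made visible) is the knob that would change the behavior:

    package main

    import (
    	corev1 "k8s.io/api/core/v1"
    )

    // webhookCertVolume builds a secret-backed volume like the "cert" volume
    // failing above. With Optional=false (the default), the kubelet refuses
    // to set up the volume until the named secret exists.
    func webhookCertVolume() corev1.Volume {
    	optional := false
    	return corev1.Volume{
    		Name: "cert",
    		VolumeSource: corev1.VolumeSource{
    			Secret: &corev1.SecretVolumeSource{
    				SecretName: "openstack-baremetal-operator-webhook-server-cert",
    				Optional:   &optional,
    			},
    		},
    	}
    }

    func main() { _ = webhookCertVolume() }
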
Feb 03 07:24:46 crc kubenswrapper[4708]: E0203 07:24:46.423120 4708 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Feb 03 07:24:46 crc kubenswrapper[4708]: E0203 07:24:46.423150 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-webhook-certs podName:b5a7b398-66a9-4c39-a940-631bcc804dfe nodeName:}" failed. No retries permitted until 2026-02-03 07:24:46.923141459 +0000 UTC m=+865.905088266 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-webhook-certs") pod "openstack-operator-controller-manager-79f7df8fc4-bhfdd" (UID: "b5a7b398-66a9-4c39-a940-631bcc804dfe") : secret "webhook-server-cert" not found
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.446256 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97b66\" (UniqueName: \"kubernetes.io/projected/b5a7b398-66a9-4c39-a940-631bcc804dfe-kube-api-access-97b66\") pod \"openstack-operator-controller-manager-79f7df8fc4-bhfdd\" (UID: \"b5a7b398-66a9-4c39-a940-631bcc804dfe\") " pod="openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.456530 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6b62\" (UniqueName: \"kubernetes.io/projected/3841da74-e9f4-4f19-ae3c-66e117029c51-kube-api-access-s6b62\") pod \"rabbitmq-cluster-operator-manager-668c99d594-mtgqj\" (UID: \"3841da74-e9f4-4f19-ae3c-66e117029c51\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-mtgqj"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.458199 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhwth\" (UniqueName: \"kubernetes.io/projected/5c9c90e2-345b-4a13-9acc-6e4d98113779-kube-api-access-dhwth\") pod \"watcher-operator-controller-manager-586b95b788-gldzv\" (UID: \"5c9c90e2-345b-4a13-9acc-6e4d98113779\") " pod="openstack-operators/watcher-operator-controller-manager-586b95b788-gldzv"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.524710 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f6f38306-d4b2-46fa-9c49-8ac276362db8-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62\" (UID: \"f6f38306-d4b2-46fa-9c49-8ac276362db8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62"
Feb 03 07:24:46 crc kubenswrapper[4708]: E0203 07:24:46.524896 4708 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Feb 03 07:24:46 crc kubenswrapper[4708]: E0203 07:24:46.524949 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f6f38306-d4b2-46fa-9c49-8ac276362db8-cert podName:f6f38306-d4b2-46fa-9c49-8ac276362db8 nodeName:}" failed. No retries permitted until 2026-02-03 07:24:47.52493312 +0000 UTC m=+866.506879937 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f6f38306-d4b2-46fa-9c49-8ac276362db8-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62" (UID: "f6f38306-d4b2-46fa-9c49-8ac276362db8") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.587533 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-586b95b788-gldzv"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.599264 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-mtgqj"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.714040 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-8f4c5cb64-d2ddp"]
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.715715 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-65dc6c8d9c-bgm44"]
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.730534 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-5d77f4dbc9-7vww8"]
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.921820 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5d86df5cd7-2pljs"]
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.934435 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-webhook-certs\") pod \"openstack-operator-controller-manager-79f7df8fc4-bhfdd\" (UID: \"b5a7b398-66a9-4c39-a940-631bcc804dfe\") " pod="openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd"
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.934590 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-metrics-certs\") pod \"openstack-operator-controller-manager-79f7df8fc4-bhfdd\" (UID: \"b5a7b398-66a9-4c39-a940-631bcc804dfe\") " pod="openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd"
Feb 03 07:24:46 crc kubenswrapper[4708]: E0203 07:24:46.934602 4708 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Feb 03 07:24:46 crc kubenswrapper[4708]: E0203 07:24:46.934666 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-webhook-certs podName:b5a7b398-66a9-4c39-a940-631bcc804dfe nodeName:}" failed. No retries permitted until 2026-02-03 07:24:47.934647335 +0000 UTC m=+866.916594152 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-webhook-certs") pod "openstack-operator-controller-manager-79f7df8fc4-bhfdd" (UID: "b5a7b398-66a9-4c39-a940-631bcc804dfe") : secret "webhook-server-cert" not found
Feb 03 07:24:46 crc kubenswrapper[4708]: E0203 07:24:46.934678 4708 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Feb 03 07:24:46 crc kubenswrapper[4708]: E0203 07:24:46.934710 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-metrics-certs podName:b5a7b398-66a9-4c39-a940-631bcc804dfe nodeName:}" failed. No retries permitted until 2026-02-03 07:24:47.934699056 +0000 UTC m=+866.916645863 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-metrics-certs") pod "openstack-operator-controller-manager-79f7df8fc4-bhfdd" (UID: "b5a7b398-66a9-4c39-a940-631bcc804dfe") : secret "metrics-server-cert" not found
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.935408 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5fb775575f-gqvwr"]
Feb 03 07:24:46 crc kubenswrapper[4708]: W0203 07:24:46.970925 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod35c2f81b_a6df_4f5c_98c9_e9efb7f362b4.slice/crio-17fba5335b03f0fcfe64709c58dd530e5953c4524ae046705d338e69fe94609a WatchSource:0}: Error finding container 17fba5335b03f0fcfe64709c58dd530e5953c4524ae046705d338e69fe94609a: Status 404 returned error can't find the container with id 17fba5335b03f0fcfe64709c58dd530e5953c4524ae046705d338e69fe94609a
Feb 03 07:24:46 crc kubenswrapper[4708]: I0203 07:24:46.988209 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-5644b66645-m7mbt"]
Feb 03 07:24:46 crc kubenswrapper[4708]: W0203 07:24:46.996193 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod802333ba_2384_4688_b939_28cbfda8bfc1.slice/crio-5d2ac5f0fed1188f68a3fa2cdbfdcf3a8ecd29e8c891ff616adf6c33972d6f54 WatchSource:0}: Error finding container 5d2ac5f0fed1188f68a3fa2cdbfdcf3a8ecd29e8c891ff616adf6c33972d6f54: Status 404 returned error can't find the container with id 5d2ac5f0fed1188f68a3fa2cdbfdcf3a8ecd29e8c891ff616adf6c33972d6f54
Feb 03 07:24:47 crc kubenswrapper[4708]: I0203 07:24:47.124765 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7775d87d9d-gpdwp"]
Feb 03 07:24:47 crc kubenswrapper[4708]: W0203 07:24:47.139513 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd6260b8b_c5f5_4803_8305_0b14903926c9.slice/crio-3e9f3fba9d00ad1f38d1cc573973433c2165c9324d475eafaf9c637be60817f3 WatchSource:0}: Error finding container 3e9f3fba9d00ad1f38d1cc573973433c2165c9324d475eafaf9c637be60817f3: Status 404 returned error can't find the container with id 3e9f3fba9d00ad1f38d1cc573973433c2165c9324d475eafaf9c637be60817f3
Feb 03 07:24:47 crc kubenswrapper[4708]: I0203 07:24:47.143152 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-576995988b-tdxnl"]
Feb 03 07:24:47 crc kubenswrapper[4708]: W0203 07:24:47.148421 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod240226bb_f320_4bd5_87ad_1d219c9e61e7.slice/crio-c55f846cff72300e2300c8a226b12ceec5f39bc08d0347c64f92512a8c385f57 WatchSource:0}: Error finding container c55f846cff72300e2300c8a226b12ceec5f39bc08d0347c64f92512a8c385f57: Status 404 returned error can't find the container with id c55f846cff72300e2300c8a226b12ceec5f39bc08d0347c64f92512a8c385f57
Feb 03 07:24:47 crc kubenswrapper[4708]: I0203 07:24:47.149619 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7b89ddb58-x6prm"]
Feb 03 07:24:47 crc kubenswrapper[4708]: I0203 07:24:47.159782 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-64469b487f-kccqb"]
Feb 03 07:24:47 crc kubenswrapper[4708]: W0203 07:24:47.162434 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7d3cec4a_da6f_431a_98d7_c4784bb248bc.slice/crio-c89aeea9f31e6561b958978629e9111add06bfc82be1223eacd3fc5f329b06dd WatchSource:0}: Error finding container c89aeea9f31e6561b958978629e9111add06bfc82be1223eacd3fc5f329b06dd: Status 404 returned error can't find the container with id c89aeea9f31e6561b958978629e9111add06bfc82be1223eacd3fc5f329b06dd
Feb 03 07:24:47 crc kubenswrapper[4708]: W0203 07:24:47.162920 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3e86abc8_c97c_4eef_b181_0d87376edd8f.slice/crio-ee057135eb1b3f882b5691ad75e4a51eb7bb7a9edfe1fde3be294b8d90685629 WatchSource:0}: Error finding container ee057135eb1b3f882b5691ad75e4a51eb7bb7a9edfe1fde3be294b8d90685629: Status 404 returned error can't find the container with id ee057135eb1b3f882b5691ad75e4a51eb7bb7a9edfe1fde3be294b8d90685629
Feb 03 07:24:47 crc kubenswrapper[4708]: I0203 07:24:47.186589 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf948998-gl2lx"]
Feb 03 07:24:47 crc kubenswrapper[4708]: I0203 07:24:47.244725 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9f166dd2-52e4-473c-9168-c065582fa0e4-cert\") pod \"infra-operator-controller-manager-79955696d6-btqlz\" (UID: \"9f166dd2-52e4-473c-9168-c065582fa0e4\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-btqlz"
Feb 03 07:24:47 crc kubenswrapper[4708]: E0203 07:24:47.245095 4708 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Feb 03 07:24:47 crc kubenswrapper[4708]: E0203 07:24:47.245152 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9f166dd2-52e4-473c-9168-c065582fa0e4-cert podName:9f166dd2-52e4-473c-9168-c065582fa0e4 nodeName:}" failed. No retries permitted until 2026-02-03 07:24:49.245134702 +0000 UTC m=+868.227081519 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9f166dd2-52e4-473c-9168-c065582fa0e4-cert") pod "infra-operator-controller-manager-79955696d6-btqlz" (UID: "9f166dd2-52e4-473c-9168-c065582fa0e4") : secret "infra-operator-webhook-server-cert" not found
Feb 03 07:24:47 crc kubenswrapper[4708]: I0203 07:24:47.366543 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-65dc6c8d9c-bgm44" event={"ID":"794426b0-cf19-43ff-957e-3413c77f0570","Type":"ContainerStarted","Data":"0026d46046c135f193a38a093d14445bc62849c677aeb7e683826c4851239516"}
Feb 03 07:24:47 crc kubenswrapper[4708]: I0203 07:24:47.379384 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7b89ddb58-x6prm" event={"ID":"240226bb-f320-4bd5-87ad-1d219c9e61e7","Type":"ContainerStarted","Data":"c55f846cff72300e2300c8a226b12ceec5f39bc08d0347c64f92512a8c385f57"}
Feb 03 07:24:47 crc kubenswrapper[4708]: I0203 07:24:47.397332 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-64469b487f-kccqb" event={"ID":"3e86abc8-c97c-4eef-b181-0d87376edd8f","Type":"ContainerStarted","Data":"ee057135eb1b3f882b5691ad75e4a51eb7bb7a9edfe1fde3be294b8d90685629"}
Feb 03 07:24:47 crc kubenswrapper[4708]: I0203 07:24:47.405073 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-866f9bb544-m4775" event={"ID":"f1edcba9-46e3-49fd-bb48-ba29b86c7bac","Type":"ContainerStarted","Data":"e079f2e8c9e470dde49de3dbda920d1e4787026dbe7e37ed8afb5490ec8ce319"}
Feb 03 07:24:47 crc kubenswrapper[4708]: I0203 07:24:47.409124 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-fc589b45f-mqk95" event={"ID":"fe83b4e1-7562-495b-99bc-aa5d1202881c","Type":"ContainerStarted","Data":"1035a696e706007622b63565ed96ca211c36babf7cc4ad3f4407c09d5976fc59"}
Feb 03 07:24:47 crc kubenswrapper[4708]: I0203 07:24:47.414083 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5d86df5cd7-2pljs" event={"ID":"c6a27492-3276-45de-a2d9-1c605152a0b6","Type":"ContainerStarted","Data":"bc143ac5c4f74691ec28b9068d212e516e96bec8a76657ad7a5dc4372c0cdd09"}
Feb 03 07:24:47 crc kubenswrapper[4708]: W0203 07:24:47.421759 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6cee24b4_302f_48db_badb_39bcab5756d9.slice/crio-82b6ac251c3defe88ab2abfc1f9d020a9d17ce5d2d33f9b9b81c37c645fe02e7 WatchSource:0}: Error finding container 82b6ac251c3defe88ab2abfc1f9d020a9d17ce5d2d33f9b9b81c37c645fe02e7: Status 404 returned error can't find the container with id 82b6ac251c3defe88ab2abfc1f9d020a9d17ce5d2d33f9b9b81c37c645fe02e7
Feb 03 07:24:47 crc kubenswrapper[4708]: I0203 07:24:47.425334 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-788c46999f-mlxcw"]
Feb 03 07:24:47 crc kubenswrapper[4708]: I0203 07:24:47.425606 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5644b66645-m7mbt" event={"ID":"802333ba-2384-4688-b939-28cbfda8bfc1","Type":"ContainerStarted","Data":"5d2ac5f0fed1188f68a3fa2cdbfdcf3a8ecd29e8c891ff616adf6c33972d6f54"}
Feb 03 07:24:47 crc kubenswrapper[4708]: E0203 07:24:47.435638 4708 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:ea7b72b648a5bde2eebd804c2a5c1608d448a4892176c1b8d000c1eef4bb92b4,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jwjxl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-788c46999f-mlxcw_openstack-operators(6cee24b4-302f-48db-badb-39bcab5756d9): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Feb 03 07:24:47 crc kubenswrapper[4708]: E0203 07:24:47.436871 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-mlxcw" podUID="6cee24b4-302f-48db-badb-39bcab5756d9"
Feb 03 07:24:47 crc kubenswrapper[4708]: I0203 07:24:47.438121 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7775d87d9d-gpdwp" event={"ID":"d6260b8b-c5f5-4803-8305-0b14903926c9","Type":"ContainerStarted","Data":"3e9f3fba9d00ad1f38d1cc573973433c2165c9324d475eafaf9c637be60817f3"}
Feb 03 07:24:47 crc kubenswrapper[4708]: E0203 07:24:47.439140 4708 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/lmiccini/telemetry-operator@sha256:674639c6f9130078d6b5e4bace30435325651c82f3090681562c9cf6655b9576,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-stgp6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-565849b54-rwgmc_openstack-operators(faade3fc-fd45-4bcf-8aa5-0b0a3765581f): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Feb 03 07:24:47 crc kubenswrapper[4708]: E0203 07:24:47.440359 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-565849b54-rwgmc" podUID="faade3fc-fd45-4bcf-8aa5-0b0a3765581f"
Feb 03 07:24:47 crc kubenswrapper[4708]: I0203 07:24:47.441452 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-576995988b-tdxnl" event={"ID":"7d3cec4a-da6f-431a-98d7-c4784bb248bc","Type":"ContainerStarted","Data":"c89aeea9f31e6561b958978629e9111add06bfc82be1223eacd3fc5f329b06dd"}
Feb 03 07:24:47 crc kubenswrapper[4708]: I0203 07:24:47.448144 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-565849b54-rwgmc"]
Feb 03 07:24:47 crc kubenswrapper[4708]: I0203 07:24:47.449758 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5d77f4dbc9-7vww8" event={"ID":"0b3f5149-6624-450b-b3bd-be0d0ca78c73","Type":"ContainerStarted","Data":"6c9e6f9a7dc72218855e46cae96e0405ad291d81dbabfa0bb755fd0837b8e126"}
Feb 03 07:24:47 crc kubenswrapper[4708]: E0203 07:24:47.452200 4708 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/lmiccini/swift-operator@sha256:8f8c3f4484960b48b4aa30b66deb78e54443e5d0a91ce7e34f3cd34675d7eda4,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vs7ds,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-7b89fdf75b-94vx4_openstack-operators(ac5a5419-6887-45ea-944d-1c8f51816492): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Feb 03 07:24:47 crc kubenswrapper[4708]: E0203 07:24:47.453483 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-7b89fdf75b-94vx4" podUID="ac5a5419-6887-45ea-944d-1c8f51816492"
Feb 03 07:24:47 crc kubenswrapper[4708]: I0203 07:24:47.454516 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-gqvwr" event={"ID":"35c2f81b-a6df-4f5c-98c9-e9efb7f362b4","Type":"ContainerStarted","Data":"17fba5335b03f0fcfe64709c58dd530e5953c4524ae046705d338e69fe94609a"}
Feb 03 07:24:47 crc kubenswrapper[4708]: I0203 07:24:47.455866 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-8f4c5cb64-d2ddp" event={"ID":"bdd317ff-3849-4a28-9640-dd4611b86599","Type":"ContainerStarted","Data":"62f41cb7b32fe9ec2f358434cdc8dd14cfdb9c205d07d54c719ff83ff73da83f"}
Feb 03 07:24:47 crc kubenswrapper[4708]: I0203 07:24:47.457426 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5b964cf4cd-k5zbb"]
Feb 03 07:24:47 crc kubenswrapper[4708]: I0203 07:24:47.458607 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-gl2lx" event={"ID":"3b0ccfa3-4ef3-4e3c-9127-59e1abc6631d","Type":"ContainerStarted","Data":"f73f71c552a056a17bf5e9672535aa02a2a8210cac2218a5d73170a9a779074b"}
Feb 03 07:24:47 crc kubenswrapper[4708]: I0203 07:24:47.465766 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-7b89fdf75b-94vx4"]
Feb 03 07:24:47 crc kubenswrapper[4708]: I0203 07:24:47.545489 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-586b95b788-gldzv"]
Feb 03 07:24:47 crc kubenswrapper[4708]: W0203 07:24:47.553726 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5c9c90e2_345b_4a13_9acc_6e4d98113779.slice/crio-d4383ac43fa8d5ed36a4a746d7e50e5689a157e3949d83521dac70471b72c56f WatchSource:0}: Error finding container d4383ac43fa8d5ed36a4a746d7e50e5689a157e3949d83521dac70471b72c56f: Status 404 returned error can't find the container with id d4383ac43fa8d5ed36a4a746d7e50e5689a157e3949d83521dac70471b72c56f
Feb 03 07:24:47 crc kubenswrapper[4708]: I0203 07:24:47.577022 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-mtgqj"]
Feb 03 07:24:47 crc kubenswrapper[4708]: I0203 07:24:47.583515 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f6f38306-d4b2-46fa-9c49-8ac276362db8-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62\" (UID: \"f6f38306-d4b2-46fa-9c49-8ac276362db8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62"
Feb 03 07:24:47 crc kubenswrapper[4708]: E0203 07:24:47.583754 4708 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Feb 03 07:24:47 crc kubenswrapper[4708]: E0203 07:24:47.583828 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f6f38306-d4b2-46fa-9c49-8ac276362db8-cert podName:f6f38306-d4b2-46fa-9c49-8ac276362db8 nodeName:}" failed. No retries permitted until 2026-02-03 07:24:49.58381031 +0000 UTC m=+868.565757117 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f6f38306-d4b2-46fa-9c49-8ac276362db8-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62" (UID: "f6f38306-d4b2-46fa-9c49-8ac276362db8") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Feb 03 07:24:47 crc kubenswrapper[4708]: I0203 07:24:47.651344 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-56f8bfcd9f-rxvkg"]
Feb 03 07:24:47 crc kubenswrapper[4708]: W0203 07:24:47.653030 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3841da74_e9f4_4f19_ae3c_66e117029c51.slice/crio-e8ab2ef29eaa7b92fdf95403a2d1c8d0e45e75c57116c4ba4bfc4fef7e729f83 WatchSource:0}: Error finding container e8ab2ef29eaa7b92fdf95403a2d1c8d0e45e75c57116c4ba4bfc4fef7e729f83: Status 404 returned error can't find the container with id e8ab2ef29eaa7b92fdf95403a2d1c8d0e45e75c57116c4ba4bfc4fef7e729f83
Feb 03 07:24:47 crc kubenswrapper[4708]: E0203 07:24:47.673407 4708 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-s6b62,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-mtgqj_openstack-operators(3841da74-e9f4-4f19-ae3c-66e117029c51): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Feb 03 07:24:47 crc kubenswrapper[4708]: E0203 07:24:47.674636 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-mtgqj" podUID="3841da74-e9f4-4f19-ae3c-66e117029c51"
Feb 03 07:24:47 crc kubenswrapper[4708]: E0203 07:24:47.675191 4708 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:3e01e99d3ca1b6c20b1bb015b00cfcbffc584f22a93dc6fe4019d63b813c0241,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vqxh7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-56f8bfcd9f-rxvkg_openstack-operators(1d0931b6-6d69-4702-9b8c-93f1a6600bbe): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Feb 03 07:24:47 crc kubenswrapper[4708]: E0203 07:24:47.676404 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-rxvkg" podUID="1d0931b6-6d69-4702-9b8c-93f1a6600bbe"
Feb 03 07:24:48 crc kubenswrapper[4708]: I0203 07:24:48.004531 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-webhook-certs\") pod \"openstack-operator-controller-manager-79f7df8fc4-bhfdd\" (UID: \"b5a7b398-66a9-4c39-a940-631bcc804dfe\") " pod="openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd"
Feb 03 07:24:48 crc kubenswrapper[4708]: I0203 07:24:48.004640 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-metrics-certs\") pod \"openstack-operator-controller-manager-79f7df8fc4-bhfdd\" (UID: \"b5a7b398-66a9-4c39-a940-631bcc804dfe\") " pod="openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd"
\"kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-metrics-certs\") pod \"openstack-operator-controller-manager-79f7df8fc4-bhfdd\" (UID: \"b5a7b398-66a9-4c39-a940-631bcc804dfe\") " pod="openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd" Feb 03 07:24:48 crc kubenswrapper[4708]: E0203 07:24:48.004726 4708 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 03 07:24:48 crc kubenswrapper[4708]: E0203 07:24:48.004777 4708 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 03 07:24:48 crc kubenswrapper[4708]: E0203 07:24:48.004818 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-webhook-certs podName:b5a7b398-66a9-4c39-a940-631bcc804dfe nodeName:}" failed. No retries permitted until 2026-02-03 07:24:50.004785507 +0000 UTC m=+868.986732314 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-webhook-certs") pod "openstack-operator-controller-manager-79f7df8fc4-bhfdd" (UID: "b5a7b398-66a9-4c39-a940-631bcc804dfe") : secret "webhook-server-cert" not found Feb 03 07:24:48 crc kubenswrapper[4708]: E0203 07:24:48.004861 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-metrics-certs podName:b5a7b398-66a9-4c39-a940-631bcc804dfe nodeName:}" failed. No retries permitted until 2026-02-03 07:24:50.004842268 +0000 UTC m=+868.986789065 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-metrics-certs") pod "openstack-operator-controller-manager-79f7df8fc4-bhfdd" (UID: "b5a7b398-66a9-4c39-a940-631bcc804dfe") : secret "metrics-server-cert" not found Feb 03 07:24:48 crc kubenswrapper[4708]: I0203 07:24:48.466117 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-586b95b788-gldzv" event={"ID":"5c9c90e2-345b-4a13-9acc-6e4d98113779","Type":"ContainerStarted","Data":"d4383ac43fa8d5ed36a4a746d7e50e5689a157e3949d83521dac70471b72c56f"} Feb 03 07:24:48 crc kubenswrapper[4708]: I0203 07:24:48.469396 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-7b89fdf75b-94vx4" event={"ID":"ac5a5419-6887-45ea-944d-1c8f51816492","Type":"ContainerStarted","Data":"e7c6440b4e8ebc456148c51808e17676b309fbe15119764e3948225b6a13a97c"} Feb 03 07:24:48 crc kubenswrapper[4708]: E0203 07:24:48.471224 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/lmiccini/swift-operator@sha256:8f8c3f4484960b48b4aa30b66deb78e54443e5d0a91ce7e34f3cd34675d7eda4\\\"\"" pod="openstack-operators/swift-operator-controller-manager-7b89fdf75b-94vx4" podUID="ac5a5419-6887-45ea-944d-1c8f51816492" Feb 03 07:24:48 crc kubenswrapper[4708]: I0203 07:24:48.472201 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-565849b54-rwgmc" event={"ID":"faade3fc-fd45-4bcf-8aa5-0b0a3765581f","Type":"ContainerStarted","Data":"6ad9f00f1ec964c6a458e67558840293c6e69532483aaa924cfbc4c93cd761a7"} Feb 03 07:24:48 crc kubenswrapper[4708]: E0203 07:24:48.473326 4708 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/lmiccini/telemetry-operator@sha256:674639c6f9130078d6b5e4bace30435325651c82f3090681562c9cf6655b9576\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-565849b54-rwgmc" podUID="faade3fc-fd45-4bcf-8aa5-0b0a3765581f" Feb 03 07:24:48 crc kubenswrapper[4708]: I0203 07:24:48.473649 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-mlxcw" event={"ID":"6cee24b4-302f-48db-badb-39bcab5756d9","Type":"ContainerStarted","Data":"82b6ac251c3defe88ab2abfc1f9d020a9d17ce5d2d33f9b9b81c37c645fe02e7"} Feb 03 07:24:48 crc kubenswrapper[4708]: I0203 07:24:48.475554 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-rxvkg" event={"ID":"1d0931b6-6d69-4702-9b8c-93f1a6600bbe","Type":"ContainerStarted","Data":"5b7da6dd5a06004a6277e03c1727fae92a8a9912e23860e503312ba4c78a7839"} Feb 03 07:24:48 crc kubenswrapper[4708]: E0203 07:24:48.475726 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:ea7b72b648a5bde2eebd804c2a5c1608d448a4892176c1b8d000c1eef4bb92b4\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-mlxcw" podUID="6cee24b4-302f-48db-badb-39bcab5756d9" Feb 03 07:24:48 crc kubenswrapper[4708]: I0203 07:24:48.476916 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-mtgqj" event={"ID":"3841da74-e9f4-4f19-ae3c-66e117029c51","Type":"ContainerStarted","Data":"e8ab2ef29eaa7b92fdf95403a2d1c8d0e45e75c57116c4ba4bfc4fef7e729f83"} Feb 03 07:24:48 crc kubenswrapper[4708]: E0203 07:24:48.478076 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:3e01e99d3ca1b6c20b1bb015b00cfcbffc584f22a93dc6fe4019d63b813c0241\\\"\"" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-rxvkg" podUID="1d0931b6-6d69-4702-9b8c-93f1a6600bbe" Feb 03 07:24:48 crc kubenswrapper[4708]: E0203 07:24:48.478121 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-mtgqj" podUID="3841da74-e9f4-4f19-ae3c-66e117029c51" Feb 03 07:24:48 crc kubenswrapper[4708]: I0203 07:24:48.478607 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-k5zbb" event={"ID":"7d53946e-45e4-4abe-b4e7-d64339fdedd3","Type":"ContainerStarted","Data":"61fd49e591e5b0d75931adf385328ecbaa5f2aadc22e3cb894a705982703c1fd"} Feb 03 07:24:49 crc kubenswrapper[4708]: I0203 07:24:49.324398 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9f166dd2-52e4-473c-9168-c065582fa0e4-cert\") pod \"infra-operator-controller-manager-79955696d6-btqlz\" (UID: \"9f166dd2-52e4-473c-9168-c065582fa0e4\") " 
pod="openstack-operators/infra-operator-controller-manager-79955696d6-btqlz" Feb 03 07:24:49 crc kubenswrapper[4708]: E0203 07:24:49.324585 4708 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 03 07:24:49 crc kubenswrapper[4708]: E0203 07:24:49.324631 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9f166dd2-52e4-473c-9168-c065582fa0e4-cert podName:9f166dd2-52e4-473c-9168-c065582fa0e4 nodeName:}" failed. No retries permitted until 2026-02-03 07:24:53.324616646 +0000 UTC m=+872.306563453 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9f166dd2-52e4-473c-9168-c065582fa0e4-cert") pod "infra-operator-controller-manager-79955696d6-btqlz" (UID: "9f166dd2-52e4-473c-9168-c065582fa0e4") : secret "infra-operator-webhook-server-cert" not found Feb 03 07:24:49 crc kubenswrapper[4708]: E0203 07:24:49.492855 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:3e01e99d3ca1b6c20b1bb015b00cfcbffc584f22a93dc6fe4019d63b813c0241\\\"\"" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-rxvkg" podUID="1d0931b6-6d69-4702-9b8c-93f1a6600bbe" Feb 03 07:24:49 crc kubenswrapper[4708]: E0203 07:24:49.493194 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/lmiccini/telemetry-operator@sha256:674639c6f9130078d6b5e4bace30435325651c82f3090681562c9cf6655b9576\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-565849b54-rwgmc" podUID="faade3fc-fd45-4bcf-8aa5-0b0a3765581f" Feb 03 07:24:49 crc kubenswrapper[4708]: E0203 07:24:49.493253 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/lmiccini/swift-operator@sha256:8f8c3f4484960b48b4aa30b66deb78e54443e5d0a91ce7e34f3cd34675d7eda4\\\"\"" pod="openstack-operators/swift-operator-controller-manager-7b89fdf75b-94vx4" podUID="ac5a5419-6887-45ea-944d-1c8f51816492" Feb 03 07:24:49 crc kubenswrapper[4708]: E0203 07:24:49.493291 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:ea7b72b648a5bde2eebd804c2a5c1608d448a4892176c1b8d000c1eef4bb92b4\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-mlxcw" podUID="6cee24b4-302f-48db-badb-39bcab5756d9" Feb 03 07:24:49 crc kubenswrapper[4708]: E0203 07:24:49.493332 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-mtgqj" podUID="3841da74-e9f4-4f19-ae3c-66e117029c51" Feb 03 07:24:49 crc kubenswrapper[4708]: I0203 07:24:49.630911 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f6f38306-d4b2-46fa-9c49-8ac276362db8-cert\") pod 
\"openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62\" (UID: \"f6f38306-d4b2-46fa-9c49-8ac276362db8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62" Feb 03 07:24:49 crc kubenswrapper[4708]: E0203 07:24:49.631455 4708 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 03 07:24:49 crc kubenswrapper[4708]: E0203 07:24:49.631504 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f6f38306-d4b2-46fa-9c49-8ac276362db8-cert podName:f6f38306-d4b2-46fa-9c49-8ac276362db8 nodeName:}" failed. No retries permitted until 2026-02-03 07:24:53.631487345 +0000 UTC m=+872.613434152 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f6f38306-d4b2-46fa-9c49-8ac276362db8-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62" (UID: "f6f38306-d4b2-46fa-9c49-8ac276362db8") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 03 07:24:50 crc kubenswrapper[4708]: I0203 07:24:50.038692 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-metrics-certs\") pod \"openstack-operator-controller-manager-79f7df8fc4-bhfdd\" (UID: \"b5a7b398-66a9-4c39-a940-631bcc804dfe\") " pod="openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd" Feb 03 07:24:50 crc kubenswrapper[4708]: I0203 07:24:50.038821 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-webhook-certs\") pod \"openstack-operator-controller-manager-79f7df8fc4-bhfdd\" (UID: \"b5a7b398-66a9-4c39-a940-631bcc804dfe\") " pod="openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd" Feb 03 07:24:50 crc kubenswrapper[4708]: E0203 07:24:50.038978 4708 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 03 07:24:50 crc kubenswrapper[4708]: E0203 07:24:50.039033 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-webhook-certs podName:b5a7b398-66a9-4c39-a940-631bcc804dfe nodeName:}" failed. No retries permitted until 2026-02-03 07:24:54.039013337 +0000 UTC m=+873.020960144 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-webhook-certs") pod "openstack-operator-controller-manager-79f7df8fc4-bhfdd" (UID: "b5a7b398-66a9-4c39-a940-631bcc804dfe") : secret "webhook-server-cert" not found Feb 03 07:24:50 crc kubenswrapper[4708]: E0203 07:24:50.039959 4708 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 03 07:24:50 crc kubenswrapper[4708]: E0203 07:24:50.040003 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-metrics-certs podName:b5a7b398-66a9-4c39-a940-631bcc804dfe nodeName:}" failed. No retries permitted until 2026-02-03 07:24:54.039992381 +0000 UTC m=+873.021939188 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-metrics-certs") pod "openstack-operator-controller-manager-79f7df8fc4-bhfdd" (UID: "b5a7b398-66a9-4c39-a940-631bcc804dfe") : secret "metrics-server-cert" not found Feb 03 07:24:53 crc kubenswrapper[4708]: I0203 07:24:53.385519 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9f166dd2-52e4-473c-9168-c065582fa0e4-cert\") pod \"infra-operator-controller-manager-79955696d6-btqlz\" (UID: \"9f166dd2-52e4-473c-9168-c065582fa0e4\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-btqlz" Feb 03 07:24:53 crc kubenswrapper[4708]: E0203 07:24:53.385711 4708 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 03 07:24:53 crc kubenswrapper[4708]: E0203 07:24:53.386039 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9f166dd2-52e4-473c-9168-c065582fa0e4-cert podName:9f166dd2-52e4-473c-9168-c065582fa0e4 nodeName:}" failed. No retries permitted until 2026-02-03 07:25:01.386022097 +0000 UTC m=+880.367968904 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9f166dd2-52e4-473c-9168-c065582fa0e4-cert") pod "infra-operator-controller-manager-79955696d6-btqlz" (UID: "9f166dd2-52e4-473c-9168-c065582fa0e4") : secret "infra-operator-webhook-server-cert" not found Feb 03 07:24:53 crc kubenswrapper[4708]: I0203 07:24:53.695694 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f6f38306-d4b2-46fa-9c49-8ac276362db8-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62\" (UID: \"f6f38306-d4b2-46fa-9c49-8ac276362db8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62" Feb 03 07:24:53 crc kubenswrapper[4708]: E0203 07:24:53.695903 4708 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 03 07:24:53 crc kubenswrapper[4708]: E0203 07:24:53.696165 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f6f38306-d4b2-46fa-9c49-8ac276362db8-cert podName:f6f38306-d4b2-46fa-9c49-8ac276362db8 nodeName:}" failed. No retries permitted until 2026-02-03 07:25:01.696144424 +0000 UTC m=+880.678091231 (durationBeforeRetry 8s). 
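Note: each failed SetUp above is retried with exponential backoff; durationBeforeRetry doubles 2s, 4s, 8s (and 16s further down) until the referenced cert secrets exist, after which MountVolume.SetUp finally succeeds at 07:25:17-07:25:18. A hedged client-go sketch of an external check for the four secrets this log is blocking on (kubeconfig loading and error handling kept minimal; this is not kubelet code):

    package main

    import (
        "context"
        "fmt"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
        if err != nil {
            panic(err)
        }
        client, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            panic(err)
        }
        // The secrets the mounts in this log are waiting for.
        for _, name := range []string{
            "infra-operator-webhook-server-cert",
            "openstack-baremetal-operator-webhook-server-cert",
            "webhook-server-cert",
            "metrics-server-cert",
        } {
            _, err := client.CoreV1().Secrets("openstack-operators").
                Get(context.TODO(), name, metav1.GetOptions{})
            fmt.Printf("%s: present=%v\n", name, err == nil)
        }
    }

Once cert-manager (or whatever issues these certs) creates the secrets, the kubelet's next scheduled retry picks them up without any intervention, which is exactly what the later "MountVolume.SetUp succeeded" entries show.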
Feb 03 07:24:53 crc kubenswrapper[4708]: I0203 07:24:53.695694 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f6f38306-d4b2-46fa-9c49-8ac276362db8-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62\" (UID: \"f6f38306-d4b2-46fa-9c49-8ac276362db8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62"
Feb 03 07:24:53 crc kubenswrapper[4708]: E0203 07:24:53.695903 4708 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Feb 03 07:24:53 crc kubenswrapper[4708]: E0203 07:24:53.696165 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f6f38306-d4b2-46fa-9c49-8ac276362db8-cert podName:f6f38306-d4b2-46fa-9c49-8ac276362db8 nodeName:}" failed. No retries permitted until 2026-02-03 07:25:01.696144424 +0000 UTC m=+880.678091231 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f6f38306-d4b2-46fa-9c49-8ac276362db8-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62" (UID: "f6f38306-d4b2-46fa-9c49-8ac276362db8") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Feb 03 07:24:54 crc kubenswrapper[4708]: I0203 07:24:54.101632 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-metrics-certs\") pod \"openstack-operator-controller-manager-79f7df8fc4-bhfdd\" (UID: \"b5a7b398-66a9-4c39-a940-631bcc804dfe\") " pod="openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd"
Feb 03 07:24:54 crc kubenswrapper[4708]: E0203 07:24:54.101743 4708 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Feb 03 07:24:54 crc kubenswrapper[4708]: E0203 07:24:54.101787 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-metrics-certs podName:b5a7b398-66a9-4c39-a940-631bcc804dfe nodeName:}" failed. No retries permitted until 2026-02-03 07:25:02.101774051 +0000 UTC m=+881.083720848 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-metrics-certs") pod "openstack-operator-controller-manager-79f7df8fc4-bhfdd" (UID: "b5a7b398-66a9-4c39-a940-631bcc804dfe") : secret "metrics-server-cert" not found
Feb 03 07:24:54 crc kubenswrapper[4708]: E0203 07:24:54.102394 4708 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Feb 03 07:24:54 crc kubenswrapper[4708]: I0203 07:24:54.102457 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-webhook-certs\") pod \"openstack-operator-controller-manager-79f7df8fc4-bhfdd\" (UID: \"b5a7b398-66a9-4c39-a940-631bcc804dfe\") " pod="openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd"
Feb 03 07:24:54 crc kubenswrapper[4708]: E0203 07:24:54.102568 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-webhook-certs podName:b5a7b398-66a9-4c39-a940-631bcc804dfe nodeName:}" failed. No retries permitted until 2026-02-03 07:25:02.102557691 +0000 UTC m=+881.084504498 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-webhook-certs") pod "openstack-operator-controller-manager-79f7df8fc4-bhfdd" (UID: "b5a7b398-66a9-4c39-a940-631bcc804dfe") : secret "webhook-server-cert" not found
Feb 03 07:24:59 crc kubenswrapper[4708]: E0203 07:24:59.981079 4708 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/lmiccini/watcher-operator@sha256:3fd1f7623a4b32505f51f329116f7e13bb4cfd320e920961a5b86441a89326d6"
Feb 03 07:24:59 crc kubenswrapper[4708]: E0203 07:24:59.981830 4708 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/lmiccini/watcher-operator@sha256:3fd1f7623a4b32505f51f329116f7e13bb4cfd320e920961a5b86441a89326d6,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dhwth,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-586b95b788-gldzv_openstack-operators(5c9c90e2-345b-4a13-9acc-6e4d98113779): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Feb 03 07:24:59 crc kubenswrapper[4708]: E0203 07:24:59.983442 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/watcher-operator-controller-manager-586b95b788-gldzv" podUID="5c9c90e2-345b-4a13-9acc-6e4d98113779"
Feb 03 07:25:00 crc kubenswrapper[4708]: E0203 07:25:00.579361 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/lmiccini/watcher-operator@sha256:3fd1f7623a4b32505f51f329116f7e13bb4cfd320e920961a5b86441a89326d6\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-586b95b788-gldzv" podUID="5c9c90e2-345b-4a13-9acc-6e4d98113779"
Feb 03 07:25:01 crc kubenswrapper[4708]: E0203 07:25:01.073069 4708 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/lmiccini/nova-operator@sha256:6b951a651861f6e805ceec19cad5a35a8dfe6fd9536acebd3c197ca4659d8a51"
Feb 03 07:25:01 crc kubenswrapper[4708]: E0203 07:25:01.073262 4708 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/lmiccini/nova-operator@sha256:6b951a651861f6e805ceec19cad5a35a8dfe6fd9536acebd3c197ca4659d8a51,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4jfl8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-5644b66645-m7mbt_openstack-operators(802333ba-2384-4688-b939-28cbfda8bfc1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Feb 03 07:25:01 crc kubenswrapper[4708]: E0203 07:25:01.074438 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-5644b66645-m7mbt" podUID="802333ba-2384-4688-b939-28cbfda8bfc1"
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.408670 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9f166dd2-52e4-473c-9168-c065582fa0e4-cert\") pod \"infra-operator-controller-manager-79955696d6-btqlz\" (UID: \"9f166dd2-52e4-473c-9168-c065582fa0e4\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-btqlz"
Feb 03 07:25:01 crc kubenswrapper[4708]: E0203 07:25:01.409090 4708 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Feb 03 07:25:01 crc kubenswrapper[4708]: E0203 07:25:01.409229 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9f166dd2-52e4-473c-9168-c065582fa0e4-cert podName:9f166dd2-52e4-473c-9168-c065582fa0e4 nodeName:}" failed. No retries permitted until 2026-02-03 07:25:17.409196889 +0000 UTC m=+896.391143766 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9f166dd2-52e4-473c-9168-c065582fa0e4-cert") pod "infra-operator-controller-manager-79955696d6-btqlz" (UID: "9f166dd2-52e4-473c-9168-c065582fa0e4") : secret "infra-operator-webhook-server-cert" not found
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.600095 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7b89ddb58-x6prm" event={"ID":"240226bb-f320-4bd5-87ad-1d219c9e61e7","Type":"ContainerStarted","Data":"efce3d7c2d49a4f31cdb2c878faacfd4cb1cdf8a676901abd8941706af7e08b8"}
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.600319 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-7b89ddb58-x6prm"
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.607541 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-gl2lx" event={"ID":"3b0ccfa3-4ef3-4e3c-9127-59e1abc6631d","Type":"ContainerStarted","Data":"d6777b1d577537ae8a0e0b18ea590390639218b4ee22e944942cf6e62776307d"}
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.608230 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-gl2lx"
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.613633 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5d77f4dbc9-7vww8" event={"ID":"0b3f5149-6624-450b-b3bd-be0d0ca78c73","Type":"ContainerStarted","Data":"07112910ad9283bb35a817cf2eec9cb2c2b69c0b2be1b13481d119e46c5648c9"}
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.613724 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-5d77f4dbc9-7vww8"
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.616687 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-k5zbb" event={"ID":"7d53946e-45e4-4abe-b4e7-d64339fdedd3","Type":"ContainerStarted","Data":"c5717872a321d2df0d6289eb447114cf4a74a20c8d354fcf4d7e3d13fae3a361"}
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.616820 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-k5zbb"
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.626892 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-866f9bb544-m4775"
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.628545 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-fc589b45f-mqk95" event={"ID":"fe83b4e1-7562-495b-99bc-aa5d1202881c","Type":"ContainerStarted","Data":"0a59b126cf75bdb6cbb96ce9368afa36815ddf24abd4cc19497b736b17e4a919"}
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.628696 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-fc589b45f-mqk95"
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.630105 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-64469b487f-kccqb" event={"ID":"3e86abc8-c97c-4eef-b181-0d87376edd8f","Type":"ContainerStarted","Data":"afdf042089273ae3c2ba5b73724892c6b5272618fce93be90065c6c482b7c888"}
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.630228 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-64469b487f-kccqb"
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.631411 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-gqvwr" event={"ID":"35c2f81b-a6df-4f5c-98c9-e9efb7f362b4","Type":"ContainerStarted","Data":"19947b5a2a1dd8f9d6c83aa3464fcfb7385192877e62181efa1928a620101687"}
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.631678 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-gqvwr"
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.633142 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-8f4c5cb64-d2ddp" event={"ID":"bdd317ff-3849-4a28-9640-dd4611b86599","Type":"ContainerStarted","Data":"e9a3dcfb26b02d05c868ea4fa524576d7cbe9c851a52b0d41167ece811d37634"}
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.633485 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-8f4c5cb64-d2ddp"
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.634302 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5d86df5cd7-2pljs" event={"ID":"c6a27492-3276-45de-a2d9-1c605152a0b6","Type":"ContainerStarted","Data":"819ac17b4434ae6afee28bbe96baf889c6020745e130422514c73ed84660f1c0"}
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.634611 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-5d86df5cd7-2pljs"
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.636094 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-65dc6c8d9c-bgm44" event={"ID":"794426b0-cf19-43ff-957e-3413c77f0570","Type":"ContainerStarted","Data":"19b7f43e1f084de753618b5aad59401b447715deab0268010de155c41b18d337"}
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.636219 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-65dc6c8d9c-bgm44"
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.641491 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7775d87d9d-gpdwp" event={"ID":"d6260b8b-c5f5-4803-8305-0b14903926c9","Type":"ContainerStarted","Data":"91eb002a00cdfcdde56bde78e913c6a14ca2c7999ed936427895cbc7335e412c"}
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.641530 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7775d87d9d-gpdwp"
Feb 03 07:25:01 crc kubenswrapper[4708]: E0203 07:25:01.645733 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/lmiccini/nova-operator@sha256:6b951a651861f6e805ceec19cad5a35a8dfe6fd9536acebd3c197ca4659d8a51\\\"\"" pod="openstack-operators/nova-operator-controller-manager-5644b66645-m7mbt" podUID="802333ba-2384-4688-b939-28cbfda8bfc1"
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.666650 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-866f9bb544-m4775" podStartSLOduration=2.088886264 podStartE2EDuration="16.666630273s" podCreationTimestamp="2026-02-03 07:24:45 +0000 UTC" firstStartedPulling="2026-02-03 07:24:46.498524391 +0000 UTC m=+865.480471198" lastFinishedPulling="2026-02-03 07:25:01.0762684 +0000 UTC m=+880.058215207" observedRunningTime="2026-02-03 07:25:01.664718037 +0000 UTC m=+880.646664844" watchObservedRunningTime="2026-02-03 07:25:01.666630273 +0000 UTC m=+880.648577080"
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.667568 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-7b89ddb58-x6prm" podStartSLOduration=2.775227347 podStartE2EDuration="16.667563636s" podCreationTimestamp="2026-02-03 07:24:45 +0000 UTC" firstStartedPulling="2026-02-03 07:24:47.153922136 +0000 UTC m=+866.135868933" lastFinishedPulling="2026-02-03 07:25:01.046258415 +0000 UTC m=+880.028205222" observedRunningTime="2026-02-03 07:25:01.638389891 +0000 UTC m=+880.620336698" watchObservedRunningTime="2026-02-03 07:25:01.667563636 +0000 UTC m=+880.649510443"
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.714989 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f6f38306-d4b2-46fa-9c49-8ac276362db8-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62\" (UID: \"f6f38306-d4b2-46fa-9c49-8ac276362db8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62"
Feb 03 07:25:01 crc kubenswrapper[4708]: E0203 07:25:01.717017 4708 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Feb 03 07:25:01 crc kubenswrapper[4708]: E0203 07:25:01.717075 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f6f38306-d4b2-46fa-9c49-8ac276362db8-cert podName:f6f38306-d4b2-46fa-9c49-8ac276362db8 nodeName:}" failed. No retries permitted until 2026-02-03 07:25:17.717057943 +0000 UTC m=+896.699004850 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f6f38306-d4b2-46fa-9c49-8ac276362db8-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62" (UID: "f6f38306-d4b2-46fa-9c49-8ac276362db8") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.734281 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-gl2lx" podStartSLOduration=2.916214416 podStartE2EDuration="16.734259978s" podCreationTimestamp="2026-02-03 07:24:45 +0000 UTC" firstStartedPulling="2026-02-03 07:24:47.227932536 +0000 UTC m=+866.209879343" lastFinishedPulling="2026-02-03 07:25:01.045978098 +0000 UTC m=+880.027924905" observedRunningTime="2026-02-03 07:25:01.701622229 +0000 UTC m=+880.683569046" watchObservedRunningTime="2026-02-03 07:25:01.734259978 +0000 UTC m=+880.716206785"
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.735092 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-fc589b45f-mqk95" podStartSLOduration=2.7456130229999998 podStartE2EDuration="16.735086179s" podCreationTimestamp="2026-02-03 07:24:45 +0000 UTC" firstStartedPulling="2026-02-03 07:24:46.479399269 +0000 UTC m=+865.461346076" lastFinishedPulling="2026-02-03 07:25:00.468872435 +0000 UTC m=+879.450819232" observedRunningTime="2026-02-03 07:25:01.73225374 +0000 UTC m=+880.714200547" watchObservedRunningTime="2026-02-03 07:25:01.735086179 +0000 UTC m=+880.717032986"
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.769359 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-gqvwr" podStartSLOduration=3.28329212 podStartE2EDuration="16.769343896s" podCreationTimestamp="2026-02-03 07:24:45 +0000 UTC" firstStartedPulling="2026-02-03 07:24:46.982992334 +0000 UTC m=+865.964939141" lastFinishedPulling="2026-02-03 07:25:00.4690441 +0000 UTC m=+879.450990917" observedRunningTime="2026-02-03 07:25:01.767574984 +0000 UTC m=+880.749521791" watchObservedRunningTime="2026-02-03 07:25:01.769343896 +0000 UTC m=+880.751290703"
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.799893 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-5d86df5cd7-2pljs" podStartSLOduration=2.718467105 podStartE2EDuration="16.799877965s" podCreationTimestamp="2026-02-03 07:24:45 +0000 UTC" firstStartedPulling="2026-02-03 07:24:46.964280641 +0000 UTC m=+865.946227448" lastFinishedPulling="2026-02-03 07:25:01.045691501 +0000 UTC m=+880.027638308" observedRunningTime="2026-02-03 07:25:01.797155779 +0000 UTC m=+880.779102576" watchObservedRunningTime="2026-02-03 07:25:01.799877965 +0000 UTC m=+880.781824772"
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.827409 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-8f4c5cb64-d2ddp" podStartSLOduration=2.543365702 podStartE2EDuration="16.82739232s" podCreationTimestamp="2026-02-03 07:24:45 +0000 UTC" firstStartedPulling="2026-02-03 07:24:46.792252692 +0000 UTC m=+865.774199509" lastFinishedPulling="2026-02-03 07:25:01.07627933 +0000 UTC m=+880.058226127" observedRunningTime="2026-02-03 07:25:01.826488548 +0000 UTC m=+880.808435355" watchObservedRunningTime="2026-02-03 07:25:01.82739232 +0000 UTC m=+880.809339127"
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.852660 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-5d77f4dbc9-7vww8" podStartSLOduration=2.599493329 podStartE2EDuration="16.852642451s" podCreationTimestamp="2026-02-03 07:24:45 +0000 UTC" firstStartedPulling="2026-02-03 07:24:46.792012076 +0000 UTC m=+865.773958893" lastFinishedPulling="2026-02-03 07:25:01.045161208 +0000 UTC m=+880.027108015" observedRunningTime="2026-02-03 07:25:01.851158585 +0000 UTC m=+880.833105392" watchObservedRunningTime="2026-02-03 07:25:01.852642451 +0000 UTC m=+880.834589258"
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.873276 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-65dc6c8d9c-bgm44" podStartSLOduration=2.607092712 podStartE2EDuration="16.873259519s" podCreationTimestamp="2026-02-03 07:24:45 +0000 UTC" firstStartedPulling="2026-02-03 07:24:46.816314014 +0000 UTC m=+865.798260821" lastFinishedPulling="2026-02-03 07:25:01.082480821 +0000 UTC m=+880.064427628" observedRunningTime="2026-02-03 07:25:01.871033725 +0000 UTC m=+880.852980522" watchObservedRunningTime="2026-02-03 07:25:01.873259519 +0000 UTC m=+880.855206326"
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.910984 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-64469b487f-kccqb" podStartSLOduration=2.996665071 podStartE2EDuration="16.91096906s" podCreationTimestamp="2026-02-03 07:24:45 +0000 UTC" firstStartedPulling="2026-02-03 07:24:47.213837945 +0000 UTC m=+866.195784752" lastFinishedPulling="2026-02-03 07:25:01.128141934 +0000 UTC m=+880.110088741" observedRunningTime="2026-02-03 07:25:01.907576899 +0000 UTC m=+880.889523706" watchObservedRunningTime="2026-02-03 07:25:01.91096906 +0000 UTC m=+880.892915857"
Feb 03 07:25:01 crc kubenswrapper[4708]: I0203 07:25:01.952265 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-k5zbb" podStartSLOduration=3.284853678 podStartE2EDuration="16.952242719s" podCreationTimestamp="2026-02-03 07:24:45 +0000 UTC" firstStartedPulling="2026-02-03 07:24:47.413398189 +0000 UTC m=+866.395344996" lastFinishedPulling="2026-02-03 07:25:01.08078723 +0000 UTC m=+880.062734037" observedRunningTime="2026-02-03 07:25:01.947224147 +0000 UTC m=+880.929170944" watchObservedRunningTime="2026-02-03 07:25:01.952242719 +0000 UTC m=+880.934189526"
Feb 03 07:25:02 crc kubenswrapper[4708]: I0203 07:25:02.036172 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7775d87d9d-gpdwp" podStartSLOduration=3.077587617 podStartE2EDuration="17.036158107s" podCreationTimestamp="2026-02-03 07:24:45 +0000 UTC" firstStartedPulling="2026-02-03 07:24:47.146871706 +0000 UTC m=+866.128818513" lastFinishedPulling="2026-02-03 07:25:01.105442196 +0000 UTC m=+880.087389003" observedRunningTime="2026-02-03 07:25:02.033467132 +0000 UTC m=+881.015413939" watchObservedRunningTime="2026-02-03 07:25:02.036158107 +0000 UTC m=+881.018104914"
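Note: the pod_startup_latency_tracker entries above report podStartE2EDuration = observedRunningTime - podCreationTimestamp, and podStartSLOduration, which further subtracts the image-pull window (lastFinishedPulling - firstStartedPulling). The cinder-operator entry checks out: 16.666630273s - 14.577744009s = 2.088886264s. A quick verification in Go with the timestamps from that entry:

    package main

    import (
        "fmt"
        "time"
    )

    // Layout of time.Time's default String() form, as printed in the log.
    const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

    func mustParse(s string) time.Time {
        t, err := time.Parse(layout, s)
        if err != nil {
            panic(err)
        }
        return t
    }

    func main() {
        created := mustParse("2026-02-03 07:24:45 +0000 UTC")
        firstPull := mustParse("2026-02-03 07:24:46.498524391 +0000 UTC")
        lastPull := mustParse("2026-02-03 07:25:01.0762684 +0000 UTC")
        running := mustParse("2026-02-03 07:25:01.664718037 +0000 UTC")

        e2e := running.Sub(created)          // 16.666630273s
        slo := e2e - lastPull.Sub(firstPull) // 2.088886264s
        fmt.Println("podStartE2EDuration:", e2e)
        fmt.Println("podStartSLOduration:", slo)
    }

The consistently small SLO durations (~2-4s) against ~16s E2E durations confirm the slow starts here were almost entirely image-pull time.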
"operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-webhook-certs\") pod \"openstack-operator-controller-manager-79f7df8fc4-bhfdd\" (UID: \"b5a7b398-66a9-4c39-a940-631bcc804dfe\") " pod="openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd" Feb 03 07:25:02 crc kubenswrapper[4708]: I0203 07:25:02.129638 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-metrics-certs\") pod \"openstack-operator-controller-manager-79f7df8fc4-bhfdd\" (UID: \"b5a7b398-66a9-4c39-a940-631bcc804dfe\") " pod="openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd" Feb 03 07:25:02 crc kubenswrapper[4708]: E0203 07:25:02.129730 4708 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 03 07:25:02 crc kubenswrapper[4708]: E0203 07:25:02.129770 4708 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 03 07:25:02 crc kubenswrapper[4708]: E0203 07:25:02.129840 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-webhook-certs podName:b5a7b398-66a9-4c39-a940-631bcc804dfe nodeName:}" failed. No retries permitted until 2026-02-03 07:25:18.129822642 +0000 UTC m=+897.111769449 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-webhook-certs") pod "openstack-operator-controller-manager-79f7df8fc4-bhfdd" (UID: "b5a7b398-66a9-4c39-a940-631bcc804dfe") : secret "webhook-server-cert" not found Feb 03 07:25:02 crc kubenswrapper[4708]: E0203 07:25:02.129857 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-metrics-certs podName:b5a7b398-66a9-4c39-a940-631bcc804dfe nodeName:}" failed. No retries permitted until 2026-02-03 07:25:18.129851693 +0000 UTC m=+897.111798500 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-metrics-certs") pod "openstack-operator-controller-manager-79f7df8fc4-bhfdd" (UID: "b5a7b398-66a9-4c39-a940-631bcc804dfe") : secret "metrics-server-cert" not found Feb 03 07:25:02 crc kubenswrapper[4708]: I0203 07:25:02.691557 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-576995988b-tdxnl" event={"ID":"7d3cec4a-da6f-431a-98d7-c4784bb248bc","Type":"ContainerStarted","Data":"ea840f22f425670f6ba2a237fbc025c9d228229e17c507cfe569af3f2761da65"} Feb 03 07:25:02 crc kubenswrapper[4708]: I0203 07:25:02.691846 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-576995988b-tdxnl" Feb 03 07:25:02 crc kubenswrapper[4708]: I0203 07:25:02.703391 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-866f9bb544-m4775" event={"ID":"f1edcba9-46e3-49fd-bb48-ba29b86c7bac","Type":"ContainerStarted","Data":"c7f55645135e9b18598661d1d138b81428d63809c17389feba8b50d2162ee02a"} Feb 03 07:25:02 crc kubenswrapper[4708]: I0203 07:25:02.720805 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-576995988b-tdxnl" podStartSLOduration=3.851679292 podStartE2EDuration="17.720762979s" podCreationTimestamp="2026-02-03 07:24:45 +0000 UTC" firstStartedPulling="2026-02-03 07:24:47.213424644 +0000 UTC m=+866.195371451" lastFinishedPulling="2026-02-03 07:25:01.082508341 +0000 UTC m=+880.064455138" observedRunningTime="2026-02-03 07:25:02.71501774 +0000 UTC m=+881.696964547" watchObservedRunningTime="2026-02-03 07:25:02.720762979 +0000 UTC m=+881.702709786" Feb 03 07:25:06 crc kubenswrapper[4708]: I0203 07:25:06.014011 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-64469b487f-kccqb" Feb 03 07:25:06 crc kubenswrapper[4708]: I0203 07:25:06.123985 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7775d87d9d-gpdwp" Feb 03 07:25:06 crc kubenswrapper[4708]: I0203 07:25:06.144842 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-gl2lx" Feb 03 07:25:06 crc kubenswrapper[4708]: I0203 07:25:06.198047 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-7b89ddb58-x6prm" Feb 03 07:25:06 crc kubenswrapper[4708]: I0203 07:25:06.199874 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-576995988b-tdxnl" Feb 03 07:25:06 crc kubenswrapper[4708]: I0203 07:25:06.282852 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-k5zbb" Feb 03 07:25:08 crc kubenswrapper[4708]: I0203 07:25:08.753253 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-rxvkg" event={"ID":"1d0931b6-6d69-4702-9b8c-93f1a6600bbe","Type":"ContainerStarted","Data":"f76e4c085e2e8b6310f04d673dc4f37e9bcb42b6021f1f2ca8334b286273f4e1"} Feb 03 07:25:08 crc kubenswrapper[4708]: I0203 
07:25:08.753885 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-rxvkg" Feb 03 07:25:08 crc kubenswrapper[4708]: I0203 07:25:08.755247 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-mtgqj" event={"ID":"3841da74-e9f4-4f19-ae3c-66e117029c51","Type":"ContainerStarted","Data":"2881b7781b14397daf9f8324ca3ca05d0f9602b6fb3d562dc3baaa768d04c47a"} Feb 03 07:25:08 crc kubenswrapper[4708]: I0203 07:25:08.757371 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-7b89fdf75b-94vx4" event={"ID":"ac5a5419-6887-45ea-944d-1c8f51816492","Type":"ContainerStarted","Data":"470fc1a2dd89048f1e758aba456b91bd586bb422be9e57564e708ebaf7f30faf"} Feb 03 07:25:08 crc kubenswrapper[4708]: I0203 07:25:08.757630 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-7b89fdf75b-94vx4" Feb 03 07:25:08 crc kubenswrapper[4708]: I0203 07:25:08.760049 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-565849b54-rwgmc" event={"ID":"faade3fc-fd45-4bcf-8aa5-0b0a3765581f","Type":"ContainerStarted","Data":"141b7215bf9037d1093866f2fa4e29360b948ea82960016e9209f7766831cf3f"} Feb 03 07:25:08 crc kubenswrapper[4708]: I0203 07:25:08.760306 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-565849b54-rwgmc" Feb 03 07:25:08 crc kubenswrapper[4708]: I0203 07:25:08.761898 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-mlxcw" event={"ID":"6cee24b4-302f-48db-badb-39bcab5756d9","Type":"ContainerStarted","Data":"f45586b9c5afb5eea2438b1402baba687a31ea938271f654c9e863e43fe2224b"} Feb 03 07:25:08 crc kubenswrapper[4708]: I0203 07:25:08.762100 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-mlxcw" Feb 03 07:25:08 crc kubenswrapper[4708]: I0203 07:25:08.774925 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-rxvkg" podStartSLOduration=3.526791628 podStartE2EDuration="23.774907817s" podCreationTimestamp="2026-02-03 07:24:45 +0000 UTC" firstStartedPulling="2026-02-03 07:24:47.675002734 +0000 UTC m=+866.656949541" lastFinishedPulling="2026-02-03 07:25:07.923118923 +0000 UTC m=+886.905065730" observedRunningTime="2026-02-03 07:25:08.771924065 +0000 UTC m=+887.753870882" watchObservedRunningTime="2026-02-03 07:25:08.774907817 +0000 UTC m=+887.756854624" Feb 03 07:25:08 crc kubenswrapper[4708]: I0203 07:25:08.787746 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-565849b54-rwgmc" podStartSLOduration=3.9435140730000002 podStartE2EDuration="23.787724187s" podCreationTimestamp="2026-02-03 07:24:45 +0000 UTC" firstStartedPulling="2026-02-03 07:24:47.439024879 +0000 UTC m=+866.420971686" lastFinishedPulling="2026-02-03 07:25:07.283234973 +0000 UTC m=+886.265181800" observedRunningTime="2026-02-03 07:25:08.785806331 +0000 UTC m=+887.767753138" watchObservedRunningTime="2026-02-03 07:25:08.787724187 +0000 UTC m=+887.769670994" Feb 03 07:25:08 crc kubenswrapper[4708]: I0203 
07:25:08.806904 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-mlxcw" podStartSLOduration=3.320754257 podStartE2EDuration="23.80687808s" podCreationTimestamp="2026-02-03 07:24:45 +0000 UTC" firstStartedPulling="2026-02-03 07:24:47.435414692 +0000 UTC m=+866.417361499" lastFinishedPulling="2026-02-03 07:25:07.921538515 +0000 UTC m=+886.903485322" observedRunningTime="2026-02-03 07:25:08.801741766 +0000 UTC m=+887.783688573" watchObservedRunningTime="2026-02-03 07:25:08.80687808 +0000 UTC m=+887.788824877" Feb 03 07:25:08 crc kubenswrapper[4708]: I0203 07:25:08.841550 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-7b89fdf75b-94vx4" podStartSLOduration=3.3361330479999998 podStartE2EDuration="23.841528737s" podCreationTimestamp="2026-02-03 07:24:45 +0000 UTC" firstStartedPulling="2026-02-03 07:24:47.452023574 +0000 UTC m=+866.433970381" lastFinishedPulling="2026-02-03 07:25:07.957419263 +0000 UTC m=+886.939366070" observedRunningTime="2026-02-03 07:25:08.832479669 +0000 UTC m=+887.814426476" watchObservedRunningTime="2026-02-03 07:25:08.841528737 +0000 UTC m=+887.823475564" Feb 03 07:25:08 crc kubenswrapper[4708]: I0203 07:25:08.857362 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-mtgqj" podStartSLOduration=2.619975762 podStartE2EDuration="22.85733933s" podCreationTimestamp="2026-02-03 07:24:46 +0000 UTC" firstStartedPulling="2026-02-03 07:24:47.673227202 +0000 UTC m=+866.655174009" lastFinishedPulling="2026-02-03 07:25:07.91059077 +0000 UTC m=+886.892537577" observedRunningTime="2026-02-03 07:25:08.849513051 +0000 UTC m=+887.831459868" watchObservedRunningTime="2026-02-03 07:25:08.85733933 +0000 UTC m=+887.839286137" Feb 03 07:25:14 crc kubenswrapper[4708]: I0203 07:25:14.809911 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5644b66645-m7mbt" event={"ID":"802333ba-2384-4688-b939-28cbfda8bfc1","Type":"ContainerStarted","Data":"c2bb8cf98519277967dff7362c802fa609b3d025438c9338f9f17a48ee339235"} Feb 03 07:25:14 crc kubenswrapper[4708]: I0203 07:25:14.811870 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-5644b66645-m7mbt" Feb 03 07:25:14 crc kubenswrapper[4708]: I0203 07:25:14.831146 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-5644b66645-m7mbt" podStartSLOduration=2.273483547 podStartE2EDuration="29.831128124s" podCreationTimestamp="2026-02-03 07:24:45 +0000 UTC" firstStartedPulling="2026-02-03 07:24:46.998062008 +0000 UTC m=+865.980008815" lastFinishedPulling="2026-02-03 07:25:14.555706575 +0000 UTC m=+893.537653392" observedRunningTime="2026-02-03 07:25:14.827816935 +0000 UTC m=+893.809763762" watchObservedRunningTime="2026-02-03 07:25:14.831128124 +0000 UTC m=+893.813074931" Feb 03 07:25:15 crc kubenswrapper[4708]: I0203 07:25:15.721859 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-fc589b45f-mqk95" Feb 03 07:25:15 crc kubenswrapper[4708]: I0203 07:25:15.734918 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/cinder-operator-controller-manager-866f9bb544-m4775" Feb 03 07:25:15 crc kubenswrapper[4708]: I0203 07:25:15.754285 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-8f4c5cb64-d2ddp" Feb 03 07:25:15 crc kubenswrapper[4708]: I0203 07:25:15.790927 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-5d77f4dbc9-7vww8" Feb 03 07:25:15 crc kubenswrapper[4708]: I0203 07:25:15.829574 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-586b95b788-gldzv" event={"ID":"5c9c90e2-345b-4a13-9acc-6e4d98113779","Type":"ContainerStarted","Data":"d0d3f503c719a4f58affee39a23d06960998322b68c198465dd31e5f79085bef"} Feb 03 07:25:15 crc kubenswrapper[4708]: I0203 07:25:15.829856 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-586b95b788-gldzv" Feb 03 07:25:15 crc kubenswrapper[4708]: I0203 07:25:15.834464 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-65dc6c8d9c-bgm44" Feb 03 07:25:15 crc kubenswrapper[4708]: I0203 07:25:15.860623 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-gqvwr" Feb 03 07:25:15 crc kubenswrapper[4708]: I0203 07:25:15.863728 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-586b95b788-gldzv" podStartSLOduration=3.619591992 podStartE2EDuration="30.863708349s" podCreationTimestamp="2026-02-03 07:24:45 +0000 UTC" firstStartedPulling="2026-02-03 07:24:47.555894125 +0000 UTC m=+866.537840932" lastFinishedPulling="2026-02-03 07:25:14.800010482 +0000 UTC m=+893.781957289" observedRunningTime="2026-02-03 07:25:15.858062233 +0000 UTC m=+894.840009050" watchObservedRunningTime="2026-02-03 07:25:15.863708349 +0000 UTC m=+894.845655156" Feb 03 07:25:15 crc kubenswrapper[4708]: I0203 07:25:15.975909 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-5d86df5cd7-2pljs" Feb 03 07:25:16 crc kubenswrapper[4708]: I0203 07:25:16.256198 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-mlxcw" Feb 03 07:25:16 crc kubenswrapper[4708]: I0203 07:25:16.316186 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-7b89fdf75b-94vx4" Feb 03 07:25:16 crc kubenswrapper[4708]: I0203 07:25:16.369494 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-565849b54-rwgmc" Feb 03 07:25:16 crc kubenswrapper[4708]: I0203 07:25:16.387147 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-rxvkg" Feb 03 07:25:17 crc kubenswrapper[4708]: I0203 07:25:17.465347 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9f166dd2-52e4-473c-9168-c065582fa0e4-cert\") pod \"infra-operator-controller-manager-79955696d6-btqlz\" (UID: 
\"9f166dd2-52e4-473c-9168-c065582fa0e4\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-btqlz" Feb 03 07:25:17 crc kubenswrapper[4708]: I0203 07:25:17.475391 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9f166dd2-52e4-473c-9168-c065582fa0e4-cert\") pod \"infra-operator-controller-manager-79955696d6-btqlz\" (UID: \"9f166dd2-52e4-473c-9168-c065582fa0e4\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-btqlz" Feb 03 07:25:17 crc kubenswrapper[4708]: I0203 07:25:17.657501 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-79955696d6-btqlz" Feb 03 07:25:17 crc kubenswrapper[4708]: I0203 07:25:17.773957 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f6f38306-d4b2-46fa-9c49-8ac276362db8-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62\" (UID: \"f6f38306-d4b2-46fa-9c49-8ac276362db8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62" Feb 03 07:25:17 crc kubenswrapper[4708]: I0203 07:25:17.786044 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f6f38306-d4b2-46fa-9c49-8ac276362db8-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62\" (UID: \"f6f38306-d4b2-46fa-9c49-8ac276362db8\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62" Feb 03 07:25:18 crc kubenswrapper[4708]: I0203 07:25:18.010544 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62" Feb 03 07:25:18 crc kubenswrapper[4708]: I0203 07:25:18.104836 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-79955696d6-btqlz"] Feb 03 07:25:18 crc kubenswrapper[4708]: W0203 07:25:18.118685 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f166dd2_52e4_473c_9168_c065582fa0e4.slice/crio-9d559d5afc11037b60b48aea95187b99fa3179ceafa25a5b4471087b4e4615cd WatchSource:0}: Error finding container 9d559d5afc11037b60b48aea95187b99fa3179ceafa25a5b4471087b4e4615cd: Status 404 returned error can't find the container with id 9d559d5afc11037b60b48aea95187b99fa3179ceafa25a5b4471087b4e4615cd Feb 03 07:25:18 crc kubenswrapper[4708]: I0203 07:25:18.179774 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-webhook-certs\") pod \"openstack-operator-controller-manager-79f7df8fc4-bhfdd\" (UID: \"b5a7b398-66a9-4c39-a940-631bcc804dfe\") " pod="openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd" Feb 03 07:25:18 crc kubenswrapper[4708]: I0203 07:25:18.179954 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-metrics-certs\") pod \"openstack-operator-controller-manager-79f7df8fc4-bhfdd\" (UID: \"b5a7b398-66a9-4c39-a940-631bcc804dfe\") " pod="openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd" Feb 03 07:25:18 crc kubenswrapper[4708]: I0203 07:25:18.185189 
4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-metrics-certs\") pod \"openstack-operator-controller-manager-79f7df8fc4-bhfdd\" (UID: \"b5a7b398-66a9-4c39-a940-631bcc804dfe\") " pod="openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd" Feb 03 07:25:18 crc kubenswrapper[4708]: I0203 07:25:18.186641 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b5a7b398-66a9-4c39-a940-631bcc804dfe-webhook-certs\") pod \"openstack-operator-controller-manager-79f7df8fc4-bhfdd\" (UID: \"b5a7b398-66a9-4c39-a940-631bcc804dfe\") " pod="openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd" Feb 03 07:25:18 crc kubenswrapper[4708]: I0203 07:25:18.365107 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd" Feb 03 07:25:18 crc kubenswrapper[4708]: I0203 07:25:18.497251 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62"] Feb 03 07:25:18 crc kubenswrapper[4708]: W0203 07:25:18.504525 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf6f38306_d4b2_46fa_9c49_8ac276362db8.slice/crio-7d14cb7cea017b51f0fb88201210b32adfa2b9ece93834ca13773423639585da WatchSource:0}: Error finding container 7d14cb7cea017b51f0fb88201210b32adfa2b9ece93834ca13773423639585da: Status 404 returned error can't find the container with id 7d14cb7cea017b51f0fb88201210b32adfa2b9ece93834ca13773423639585da Feb 03 07:25:18 crc kubenswrapper[4708]: I0203 07:25:18.807747 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd"] Feb 03 07:25:18 crc kubenswrapper[4708]: W0203 07:25:18.811063 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb5a7b398_66a9_4c39_a940_631bcc804dfe.slice/crio-a780a33be5e1a4706051fa7e2f301b32a1360c3f57a5103126da38bae8627016 WatchSource:0}: Error finding container a780a33be5e1a4706051fa7e2f301b32a1360c3f57a5103126da38bae8627016: Status 404 returned error can't find the container with id a780a33be5e1a4706051fa7e2f301b32a1360c3f57a5103126da38bae8627016 Feb 03 07:25:18 crc kubenswrapper[4708]: I0203 07:25:18.852992 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd" event={"ID":"b5a7b398-66a9-4c39-a940-631bcc804dfe","Type":"ContainerStarted","Data":"a780a33be5e1a4706051fa7e2f301b32a1360c3f57a5103126da38bae8627016"} Feb 03 07:25:18 crc kubenswrapper[4708]: I0203 07:25:18.854100 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62" event={"ID":"f6f38306-d4b2-46fa-9c49-8ac276362db8","Type":"ContainerStarted","Data":"7d14cb7cea017b51f0fb88201210b32adfa2b9ece93834ca13773423639585da"} Feb 03 07:25:18 crc kubenswrapper[4708]: I0203 07:25:18.855455 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-79955696d6-btqlz" 
event={"ID":"9f166dd2-52e4-473c-9168-c065582fa0e4","Type":"ContainerStarted","Data":"9d559d5afc11037b60b48aea95187b99fa3179ceafa25a5b4471087b4e4615cd"} Feb 03 07:25:24 crc kubenswrapper[4708]: I0203 07:25:24.902701 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd" event={"ID":"b5a7b398-66a9-4c39-a940-631bcc804dfe","Type":"ContainerStarted","Data":"850ae368e4688ea913f1d788d40f89d59863178e1bd959c6cda0b3133be61372"} Feb 03 07:25:24 crc kubenswrapper[4708]: I0203 07:25:24.903307 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd" Feb 03 07:25:24 crc kubenswrapper[4708]: I0203 07:25:24.933141 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd" podStartSLOduration=38.933126005 podStartE2EDuration="38.933126005s" podCreationTimestamp="2026-02-03 07:24:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:25:24.928436332 +0000 UTC m=+903.910383139" watchObservedRunningTime="2026-02-03 07:25:24.933126005 +0000 UTC m=+903.915072812" Feb 03 07:25:26 crc kubenswrapper[4708]: I0203 07:25:26.201903 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-5644b66645-m7mbt" Feb 03 07:25:26 crc kubenswrapper[4708]: I0203 07:25:26.592919 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-586b95b788-gldzv" Feb 03 07:25:28 crc kubenswrapper[4708]: I0203 07:25:28.928548 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62" event={"ID":"f6f38306-d4b2-46fa-9c49-8ac276362db8","Type":"ContainerStarted","Data":"866e02b0ceac9ce1bd03e5ed27927b5caada22f15f8523604b32cb901fe19589"} Feb 03 07:25:28 crc kubenswrapper[4708]: I0203 07:25:28.929012 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62" Feb 03 07:25:28 crc kubenswrapper[4708]: I0203 07:25:28.930356 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-79955696d6-btqlz" event={"ID":"9f166dd2-52e4-473c-9168-c065582fa0e4","Type":"ContainerStarted","Data":"32d356eee14ccb753314d1c4a9d792a5a6cc17f30ef815fe0c1162892272666f"} Feb 03 07:25:28 crc kubenswrapper[4708]: I0203 07:25:28.930482 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-79955696d6-btqlz" Feb 03 07:25:28 crc kubenswrapper[4708]: I0203 07:25:28.993134 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62" podStartSLOduration=34.167334019 podStartE2EDuration="43.993107111s" podCreationTimestamp="2026-02-03 07:24:45 +0000 UTC" firstStartedPulling="2026-02-03 07:25:18.506606735 +0000 UTC m=+897.488553542" lastFinishedPulling="2026-02-03 07:25:28.332379827 +0000 UTC m=+907.314326634" observedRunningTime="2026-02-03 07:25:28.97074414 +0000 UTC m=+907.952690947" watchObservedRunningTime="2026-02-03 07:25:28.993107111 +0000 UTC 
m=+907.975053938" Feb 03 07:25:28 crc kubenswrapper[4708]: I0203 07:25:28.996732 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-79955696d6-btqlz" podStartSLOduration=33.789050593 podStartE2EDuration="43.996716128s" podCreationTimestamp="2026-02-03 07:24:45 +0000 UTC" firstStartedPulling="2026-02-03 07:25:18.126872584 +0000 UTC m=+897.108819401" lastFinishedPulling="2026-02-03 07:25:28.334538129 +0000 UTC m=+907.316484936" observedRunningTime="2026-02-03 07:25:28.988268744 +0000 UTC m=+907.970215581" watchObservedRunningTime="2026-02-03 07:25:28.996716128 +0000 UTC m=+907.978662935" Feb 03 07:25:37 crc kubenswrapper[4708]: I0203 07:25:37.665425 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-79955696d6-btqlz" Feb 03 07:25:38 crc kubenswrapper[4708]: I0203 07:25:38.018829 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62" Feb 03 07:25:38 crc kubenswrapper[4708]: I0203 07:25:38.373006 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-79f7df8fc4-bhfdd" Feb 03 07:25:51 crc kubenswrapper[4708]: I0203 07:25:51.835787 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-7f7dd"] Feb 03 07:25:51 crc kubenswrapper[4708]: I0203 07:25:51.837448 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-7f7dd" Feb 03 07:25:51 crc kubenswrapper[4708]: I0203 07:25:51.839402 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Feb 03 07:25:51 crc kubenswrapper[4708]: I0203 07:25:51.839985 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Feb 03 07:25:51 crc kubenswrapper[4708]: I0203 07:25:51.840213 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Feb 03 07:25:51 crc kubenswrapper[4708]: I0203 07:25:51.841654 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-8vjdd" Feb 03 07:25:51 crc kubenswrapper[4708]: I0203 07:25:51.851477 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-7f7dd"] Feb 03 07:25:51 crc kubenswrapper[4708]: I0203 07:25:51.871262 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-flpvw"] Feb 03 07:25:51 crc kubenswrapper[4708]: I0203 07:25:51.872544 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-flpvw" Feb 03 07:25:51 crc kubenswrapper[4708]: I0203 07:25:51.874998 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Feb 03 07:25:51 crc kubenswrapper[4708]: I0203 07:25:51.896976 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-flpvw"] Feb 03 07:25:51 crc kubenswrapper[4708]: I0203 07:25:51.999267 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a4686c6a-4b08-499a-a7d0-ce38a13cf4d1-config\") pod \"dnsmasq-dns-78dd6ddcc-flpvw\" (UID: \"a4686c6a-4b08-499a-a7d0-ce38a13cf4d1\") " pod="openstack/dnsmasq-dns-78dd6ddcc-flpvw" Feb 03 07:25:51 crc kubenswrapper[4708]: I0203 07:25:51.999328 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkzt7\" (UniqueName: \"kubernetes.io/projected/938f9b73-8b43-416f-a5ca-ebb496a12734-kube-api-access-nkzt7\") pod \"dnsmasq-dns-675f4bcbfc-7f7dd\" (UID: \"938f9b73-8b43-416f-a5ca-ebb496a12734\") " pod="openstack/dnsmasq-dns-675f4bcbfc-7f7dd" Feb 03 07:25:51 crc kubenswrapper[4708]: I0203 07:25:51.999358 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a4686c6a-4b08-499a-a7d0-ce38a13cf4d1-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-flpvw\" (UID: \"a4686c6a-4b08-499a-a7d0-ce38a13cf4d1\") " pod="openstack/dnsmasq-dns-78dd6ddcc-flpvw" Feb 03 07:25:51 crc kubenswrapper[4708]: I0203 07:25:51.999381 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/938f9b73-8b43-416f-a5ca-ebb496a12734-config\") pod \"dnsmasq-dns-675f4bcbfc-7f7dd\" (UID: \"938f9b73-8b43-416f-a5ca-ebb496a12734\") " pod="openstack/dnsmasq-dns-675f4bcbfc-7f7dd" Feb 03 07:25:51 crc kubenswrapper[4708]: I0203 07:25:51.999424 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfhnq\" (UniqueName: \"kubernetes.io/projected/a4686c6a-4b08-499a-a7d0-ce38a13cf4d1-kube-api-access-kfhnq\") pod \"dnsmasq-dns-78dd6ddcc-flpvw\" (UID: \"a4686c6a-4b08-499a-a7d0-ce38a13cf4d1\") " pod="openstack/dnsmasq-dns-78dd6ddcc-flpvw" Feb 03 07:25:52 crc kubenswrapper[4708]: I0203 07:25:52.101237 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfhnq\" (UniqueName: \"kubernetes.io/projected/a4686c6a-4b08-499a-a7d0-ce38a13cf4d1-kube-api-access-kfhnq\") pod \"dnsmasq-dns-78dd6ddcc-flpvw\" (UID: \"a4686c6a-4b08-499a-a7d0-ce38a13cf4d1\") " pod="openstack/dnsmasq-dns-78dd6ddcc-flpvw" Feb 03 07:25:52 crc kubenswrapper[4708]: I0203 07:25:52.101404 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a4686c6a-4b08-499a-a7d0-ce38a13cf4d1-config\") pod \"dnsmasq-dns-78dd6ddcc-flpvw\" (UID: \"a4686c6a-4b08-499a-a7d0-ce38a13cf4d1\") " pod="openstack/dnsmasq-dns-78dd6ddcc-flpvw" Feb 03 07:25:52 crc kubenswrapper[4708]: I0203 07:25:52.101479 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkzt7\" (UniqueName: \"kubernetes.io/projected/938f9b73-8b43-416f-a5ca-ebb496a12734-kube-api-access-nkzt7\") pod \"dnsmasq-dns-675f4bcbfc-7f7dd\" (UID: \"938f9b73-8b43-416f-a5ca-ebb496a12734\") " 
pod="openstack/dnsmasq-dns-675f4bcbfc-7f7dd" Feb 03 07:25:52 crc kubenswrapper[4708]: I0203 07:25:52.101519 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a4686c6a-4b08-499a-a7d0-ce38a13cf4d1-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-flpvw\" (UID: \"a4686c6a-4b08-499a-a7d0-ce38a13cf4d1\") " pod="openstack/dnsmasq-dns-78dd6ddcc-flpvw" Feb 03 07:25:52 crc kubenswrapper[4708]: I0203 07:25:52.101565 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/938f9b73-8b43-416f-a5ca-ebb496a12734-config\") pod \"dnsmasq-dns-675f4bcbfc-7f7dd\" (UID: \"938f9b73-8b43-416f-a5ca-ebb496a12734\") " pod="openstack/dnsmasq-dns-675f4bcbfc-7f7dd" Feb 03 07:25:52 crc kubenswrapper[4708]: I0203 07:25:52.102455 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a4686c6a-4b08-499a-a7d0-ce38a13cf4d1-config\") pod \"dnsmasq-dns-78dd6ddcc-flpvw\" (UID: \"a4686c6a-4b08-499a-a7d0-ce38a13cf4d1\") " pod="openstack/dnsmasq-dns-78dd6ddcc-flpvw" Feb 03 07:25:52 crc kubenswrapper[4708]: I0203 07:25:52.102823 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a4686c6a-4b08-499a-a7d0-ce38a13cf4d1-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-flpvw\" (UID: \"a4686c6a-4b08-499a-a7d0-ce38a13cf4d1\") " pod="openstack/dnsmasq-dns-78dd6ddcc-flpvw" Feb 03 07:25:52 crc kubenswrapper[4708]: I0203 07:25:52.102911 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/938f9b73-8b43-416f-a5ca-ebb496a12734-config\") pod \"dnsmasq-dns-675f4bcbfc-7f7dd\" (UID: \"938f9b73-8b43-416f-a5ca-ebb496a12734\") " pod="openstack/dnsmasq-dns-675f4bcbfc-7f7dd" Feb 03 07:25:52 crc kubenswrapper[4708]: I0203 07:25:52.126586 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkzt7\" (UniqueName: \"kubernetes.io/projected/938f9b73-8b43-416f-a5ca-ebb496a12734-kube-api-access-nkzt7\") pod \"dnsmasq-dns-675f4bcbfc-7f7dd\" (UID: \"938f9b73-8b43-416f-a5ca-ebb496a12734\") " pod="openstack/dnsmasq-dns-675f4bcbfc-7f7dd" Feb 03 07:25:52 crc kubenswrapper[4708]: I0203 07:25:52.127273 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfhnq\" (UniqueName: \"kubernetes.io/projected/a4686c6a-4b08-499a-a7d0-ce38a13cf4d1-kube-api-access-kfhnq\") pod \"dnsmasq-dns-78dd6ddcc-flpvw\" (UID: \"a4686c6a-4b08-499a-a7d0-ce38a13cf4d1\") " pod="openstack/dnsmasq-dns-78dd6ddcc-flpvw" Feb 03 07:25:52 crc kubenswrapper[4708]: I0203 07:25:52.160969 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-7f7dd" Feb 03 07:25:52 crc kubenswrapper[4708]: I0203 07:25:52.202443 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-flpvw"
Feb 03 07:25:52 crc kubenswrapper[4708]: I0203 07:25:52.517316 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-7f7dd"]
Feb 03 07:25:52 crc kubenswrapper[4708]: I0203 07:25:52.645070 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-flpvw"]
Feb 03 07:25:52 crc kubenswrapper[4708]: W0203 07:25:52.648382 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda4686c6a_4b08_499a_a7d0_ce38a13cf4d1.slice/crio-b66b82ef6f5861c52194b18fa65379c4a19a5057a2cfda702ba61c39a0c594b0 WatchSource:0}: Error finding container b66b82ef6f5861c52194b18fa65379c4a19a5057a2cfda702ba61c39a0c594b0: Status 404 returned error can't find the container with id b66b82ef6f5861c52194b18fa65379c4a19a5057a2cfda702ba61c39a0c594b0
Feb 03 07:25:53 crc kubenswrapper[4708]: I0203 07:25:53.127023 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-7f7dd" event={"ID":"938f9b73-8b43-416f-a5ca-ebb496a12734","Type":"ContainerStarted","Data":"935e217b620def99f0b9b2149bc72d0d9535008b8591a86a635e205c9adc767e"}
Feb 03 07:25:53 crc kubenswrapper[4708]: I0203 07:25:53.128836 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-flpvw" event={"ID":"a4686c6a-4b08-499a-a7d0-ce38a13cf4d1","Type":"ContainerStarted","Data":"b66b82ef6f5861c52194b18fa65379c4a19a5057a2cfda702ba61c39a0c594b0"}
Feb 03 07:25:54 crc kubenswrapper[4708]: I0203 07:25:54.643542 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-7f7dd"]
Feb 03 07:25:54 crc kubenswrapper[4708]: I0203 07:25:54.671701 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-gpnt5"]
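Each "manager.go:1169] Failed to process watch event ... Status 404" warning above names a crio-<id> cgroup whose container cAdvisor could not yet resolve. In this log every such container id (here b66b82ef...) reappears moments later in a ContainerStarted PLEG event for the same pod, which is consistent with a benign startup race rather than lost containers. A sketch that cross-checks the two message shapes over a whole log file (the message patterns are taken from the lines above; the function name is illustrative):

    import re
    from typing import Iterable

    WARN_404 = re.compile(r"can't find the container with id ([0-9a-f]{64})")
    PLEG_STARTED = re.compile(r'"ContainerStarted","Data":"([0-9a-f]{64})"')

    def orphaned_watch_warnings(lines: Iterable[str]) -> set:
        """Container ids from 404 watch warnings that never show a ContainerStarted event."""
        warned, started = set(), set()
        for line in lines:
            warned.update(WARN_404.findall(line))
            started.update(PLEG_STARTED.findall(line))
        return warned - started   # empty on this log: every warned id eventually started

For the section shown here this returns the empty set; a non-empty result would point at containers cAdvisor warned about that the kubelet never saw start.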
Feb 03 07:25:54 crc kubenswrapper[4708]: I0203 07:25:54.672811 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-gpnt5"
Feb 03 07:25:54 crc kubenswrapper[4708]: I0203 07:25:54.680992 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-gpnt5"]
Feb 03 07:25:54 crc kubenswrapper[4708]: I0203 07:25:54.739162 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c44c6867-b4bc-45f2-9100-cc320788a3c0-config\") pod \"dnsmasq-dns-666b6646f7-gpnt5\" (UID: \"c44c6867-b4bc-45f2-9100-cc320788a3c0\") " pod="openstack/dnsmasq-dns-666b6646f7-gpnt5"
Feb 03 07:25:54 crc kubenswrapper[4708]: I0203 07:25:54.739228 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hr4f6\" (UniqueName: \"kubernetes.io/projected/c44c6867-b4bc-45f2-9100-cc320788a3c0-kube-api-access-hr4f6\") pod \"dnsmasq-dns-666b6646f7-gpnt5\" (UID: \"c44c6867-b4bc-45f2-9100-cc320788a3c0\") " pod="openstack/dnsmasq-dns-666b6646f7-gpnt5"
Feb 03 07:25:54 crc kubenswrapper[4708]: I0203 07:25:54.739248 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c44c6867-b4bc-45f2-9100-cc320788a3c0-dns-svc\") pod \"dnsmasq-dns-666b6646f7-gpnt5\" (UID: \"c44c6867-b4bc-45f2-9100-cc320788a3c0\") " pod="openstack/dnsmasq-dns-666b6646f7-gpnt5"
Feb 03 07:25:54 crc kubenswrapper[4708]: I0203 07:25:54.841063 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hr4f6\" (UniqueName: \"kubernetes.io/projected/c44c6867-b4bc-45f2-9100-cc320788a3c0-kube-api-access-hr4f6\") pod \"dnsmasq-dns-666b6646f7-gpnt5\" (UID: \"c44c6867-b4bc-45f2-9100-cc320788a3c0\") " pod="openstack/dnsmasq-dns-666b6646f7-gpnt5"
Feb 03 07:25:54 crc kubenswrapper[4708]: I0203 07:25:54.841109 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c44c6867-b4bc-45f2-9100-cc320788a3c0-dns-svc\") pod \"dnsmasq-dns-666b6646f7-gpnt5\" (UID: \"c44c6867-b4bc-45f2-9100-cc320788a3c0\") " pod="openstack/dnsmasq-dns-666b6646f7-gpnt5"
Feb 03 07:25:54 crc kubenswrapper[4708]: I0203 07:25:54.841190 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c44c6867-b4bc-45f2-9100-cc320788a3c0-config\") pod \"dnsmasq-dns-666b6646f7-gpnt5\" (UID: \"c44c6867-b4bc-45f2-9100-cc320788a3c0\") " pod="openstack/dnsmasq-dns-666b6646f7-gpnt5"
Feb 03 07:25:54 crc kubenswrapper[4708]: I0203 07:25:54.842292 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c44c6867-b4bc-45f2-9100-cc320788a3c0-config\") pod \"dnsmasq-dns-666b6646f7-gpnt5\" (UID: \"c44c6867-b4bc-45f2-9100-cc320788a3c0\") " pod="openstack/dnsmasq-dns-666b6646f7-gpnt5"
Feb 03 07:25:54 crc kubenswrapper[4708]: I0203 07:25:54.842640 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c44c6867-b4bc-45f2-9100-cc320788a3c0-dns-svc\") pod \"dnsmasq-dns-666b6646f7-gpnt5\" (UID: \"c44c6867-b4bc-45f2-9100-cc320788a3c0\") " pod="openstack/dnsmasq-dns-666b6646f7-gpnt5"
Feb 03 07:25:54 crc kubenswrapper[4708]: I0203 07:25:54.872499 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hr4f6\" (UniqueName:
\"kubernetes.io/projected/c44c6867-b4bc-45f2-9100-cc320788a3c0-kube-api-access-hr4f6\") pod \"dnsmasq-dns-666b6646f7-gpnt5\" (UID: \"c44c6867-b4bc-45f2-9100-cc320788a3c0\") " pod="openstack/dnsmasq-dns-666b6646f7-gpnt5" Feb 03 07:25:54 crc kubenswrapper[4708]: I0203 07:25:54.945260 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-flpvw"] Feb 03 07:25:54 crc kubenswrapper[4708]: I0203 07:25:54.978021 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-zwvx2"] Feb 03 07:25:54 crc kubenswrapper[4708]: I0203 07:25:54.994596 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-zwvx2"] Feb 03 07:25:54 crc kubenswrapper[4708]: I0203 07:25:54.994833 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-zwvx2" Feb 03 07:25:54 crc kubenswrapper[4708]: I0203 07:25:54.999533 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-gpnt5" Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.044003 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/193dd272-5329-401f-a02c-0910a3d98246-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-zwvx2\" (UID: \"193dd272-5329-401f-a02c-0910a3d98246\") " pod="openstack/dnsmasq-dns-57d769cc4f-zwvx2" Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.044382 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skjtt\" (UniqueName: \"kubernetes.io/projected/193dd272-5329-401f-a02c-0910a3d98246-kube-api-access-skjtt\") pod \"dnsmasq-dns-57d769cc4f-zwvx2\" (UID: \"193dd272-5329-401f-a02c-0910a3d98246\") " pod="openstack/dnsmasq-dns-57d769cc4f-zwvx2" Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.044483 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/193dd272-5329-401f-a02c-0910a3d98246-config\") pod \"dnsmasq-dns-57d769cc4f-zwvx2\" (UID: \"193dd272-5329-401f-a02c-0910a3d98246\") " pod="openstack/dnsmasq-dns-57d769cc4f-zwvx2" Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.145289 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/193dd272-5329-401f-a02c-0910a3d98246-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-zwvx2\" (UID: \"193dd272-5329-401f-a02c-0910a3d98246\") " pod="openstack/dnsmasq-dns-57d769cc4f-zwvx2" Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.145344 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skjtt\" (UniqueName: \"kubernetes.io/projected/193dd272-5329-401f-a02c-0910a3d98246-kube-api-access-skjtt\") pod \"dnsmasq-dns-57d769cc4f-zwvx2\" (UID: \"193dd272-5329-401f-a02c-0910a3d98246\") " pod="openstack/dnsmasq-dns-57d769cc4f-zwvx2" Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.145403 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/193dd272-5329-401f-a02c-0910a3d98246-config\") pod \"dnsmasq-dns-57d769cc4f-zwvx2\" (UID: \"193dd272-5329-401f-a02c-0910a3d98246\") " pod="openstack/dnsmasq-dns-57d769cc4f-zwvx2" Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.146384 4708 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/193dd272-5329-401f-a02c-0910a3d98246-config\") pod \"dnsmasq-dns-57d769cc4f-zwvx2\" (UID: \"193dd272-5329-401f-a02c-0910a3d98246\") " pod="openstack/dnsmasq-dns-57d769cc4f-zwvx2" Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.147094 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/193dd272-5329-401f-a02c-0910a3d98246-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-zwvx2\" (UID: \"193dd272-5329-401f-a02c-0910a3d98246\") " pod="openstack/dnsmasq-dns-57d769cc4f-zwvx2" Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.182489 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skjtt\" (UniqueName: \"kubernetes.io/projected/193dd272-5329-401f-a02c-0910a3d98246-kube-api-access-skjtt\") pod \"dnsmasq-dns-57d769cc4f-zwvx2\" (UID: \"193dd272-5329-401f-a02c-0910a3d98246\") " pod="openstack/dnsmasq-dns-57d769cc4f-zwvx2" Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.377080 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-zwvx2" Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.506563 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-gpnt5"] Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.622369 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-zwvx2"] Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.805482 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.807597 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.811120 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.811346 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.811358 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.811736 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.811868 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-7dqg9" Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.812341 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.813249 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.829446 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.960321 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0edbabdf-99f1-49b3-83ee-48ad17467638-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0" Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.960369 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvs4r\" (UniqueName: \"kubernetes.io/projected/0edbabdf-99f1-49b3-83ee-48ad17467638-kube-api-access-cvs4r\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0" Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.960407 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0edbabdf-99f1-49b3-83ee-48ad17467638-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0" Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.960429 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0edbabdf-99f1-49b3-83ee-48ad17467638-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0" Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.960463 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0edbabdf-99f1-49b3-83ee-48ad17467638-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0" Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.960512 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/0edbabdf-99f1-49b3-83ee-48ad17467638-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0" Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.960529 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0edbabdf-99f1-49b3-83ee-48ad17467638-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0" Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.960549 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0" Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.960582 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0edbabdf-99f1-49b3-83ee-48ad17467638-config-data\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0" Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.960600 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0edbabdf-99f1-49b3-83ee-48ad17467638-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0" Feb 03 07:25:55 crc kubenswrapper[4708]: I0203 07:25:55.960627 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0edbabdf-99f1-49b3-83ee-48ad17467638-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.062258 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0edbabdf-99f1-49b3-83ee-48ad17467638-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.062317 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0edbabdf-99f1-49b3-83ee-48ad17467638-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.062347 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.062390 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0edbabdf-99f1-49b3-83ee-48ad17467638-config-data\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0" Feb 03 
Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.062411 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0edbabdf-99f1-49b3-83ee-48ad17467638-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0"
Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.062448 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0edbabdf-99f1-49b3-83ee-48ad17467638-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0"
Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.062474 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0edbabdf-99f1-49b3-83ee-48ad17467638-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0"
Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.062499 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvs4r\" (UniqueName: \"kubernetes.io/projected/0edbabdf-99f1-49b3-83ee-48ad17467638-kube-api-access-cvs4r\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0"
Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.062536 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0edbabdf-99f1-49b3-83ee-48ad17467638-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0"
Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.062560 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0edbabdf-99f1-49b3-83ee-48ad17467638-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0"
Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.062601 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0edbabdf-99f1-49b3-83ee-48ad17467638-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0"
Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.063389 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0edbabdf-99f1-49b3-83ee-48ad17467638-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0"
Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.064035 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0edbabdf-99f1-49b3-83ee-48ad17467638-config-data\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0"
Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.064263 4708 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod
\"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/rabbitmq-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.064473 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0edbabdf-99f1-49b3-83ee-48ad17467638-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.064844 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0edbabdf-99f1-49b3-83ee-48ad17467638-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.065196 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0edbabdf-99f1-49b3-83ee-48ad17467638-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.070314 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0edbabdf-99f1-49b3-83ee-48ad17467638-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.072015 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0edbabdf-99f1-49b3-83ee-48ad17467638-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.075353 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0edbabdf-99f1-49b3-83ee-48ad17467638-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.087283 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0edbabdf-99f1-49b3-83ee-48ad17467638-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.088682 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvs4r\" (UniqueName: \"kubernetes.io/projected/0edbabdf-99f1-49b3-83ee-48ad17467638-kube-api-access-cvs4r\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.122068 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " pod="openstack/rabbitmq-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.136961 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.143711 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.144862 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.144946 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.151353 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.151456 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.151696 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-jdccx" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.151768 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.152141 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.152445 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.153625 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.173861 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-gpnt5" event={"ID":"c44c6867-b4bc-45f2-9100-cc320788a3c0","Type":"ContainerStarted","Data":"2cb623c3e8fd4e358655177877eabdbb14a17788a4cf2008f334d49506b6f8e4"} Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.175571 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-zwvx2" event={"ID":"193dd272-5329-401f-a02c-0910a3d98246","Type":"ContainerStarted","Data":"7540af77898882824276bd81d425eba4b44f42f1865e23b7fff09424b4c51a15"} Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.264865 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.264916 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.264942 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " 
pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.264958 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.264979 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.265029 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.265070 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.265087 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.265117 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rs6rv\" (UniqueName: \"kubernetes.io/projected/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-kube-api-access-rs6rv\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.265146 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.265174 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.368500 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rs6rv\" (UniqueName: \"kubernetes.io/projected/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-kube-api-access-rs6rv\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.368554 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.368652 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.368694 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.368711 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.368734 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.368749 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.368766 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.368816 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.368852 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.368869 4708 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.371330 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.371520 4708 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.374816 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.376940 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.377247 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.377329 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.377997 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.381610 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.382179 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-rabbitmq-tls\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.382855 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.405903 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rs6rv\" (UniqueName: \"kubernetes.io/projected/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-kube-api-access-rs6rv\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.422663 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:56 crc kubenswrapper[4708]: I0203 07:25:56.500921 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.459618 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.461767 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.465202 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.466428 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.466576 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-wwwlf" Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.467318 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.489480 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.489545 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.591061 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f6fa285-4374-4be5-b4cf-e3dd8ef56762-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"5f6fa285-4374-4be5-b4cf-e3dd8ef56762\") " pod="openstack/openstack-galera-0" Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.591150 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5f6fa285-4374-4be5-b4cf-e3dd8ef56762-config-data-default\") pod \"openstack-galera-0\" (UID: \"5f6fa285-4374-4be5-b4cf-e3dd8ef56762\") " pod="openstack/openstack-galera-0" Feb 03 07:25:57 crc 
kubenswrapper[4708]: I0203 07:25:57.591180 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f6fa285-4374-4be5-b4cf-e3dd8ef56762-operator-scripts\") pod \"openstack-galera-0\" (UID: \"5f6fa285-4374-4be5-b4cf-e3dd8ef56762\") " pod="openstack/openstack-galera-0" Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.591203 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f6fa285-4374-4be5-b4cf-e3dd8ef56762-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"5f6fa285-4374-4be5-b4cf-e3dd8ef56762\") " pod="openstack/openstack-galera-0" Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.591239 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vw97g\" (UniqueName: \"kubernetes.io/projected/5f6fa285-4374-4be5-b4cf-e3dd8ef56762-kube-api-access-vw97g\") pod \"openstack-galera-0\" (UID: \"5f6fa285-4374-4be5-b4cf-e3dd8ef56762\") " pod="openstack/openstack-galera-0" Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.591277 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5f6fa285-4374-4be5-b4cf-e3dd8ef56762-config-data-generated\") pod \"openstack-galera-0\" (UID: \"5f6fa285-4374-4be5-b4cf-e3dd8ef56762\") " pod="openstack/openstack-galera-0" Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.591296 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5f6fa285-4374-4be5-b4cf-e3dd8ef56762-kolla-config\") pod \"openstack-galera-0\" (UID: \"5f6fa285-4374-4be5-b4cf-e3dd8ef56762\") " pod="openstack/openstack-galera-0" Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.591315 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-0\" (UID: \"5f6fa285-4374-4be5-b4cf-e3dd8ef56762\") " pod="openstack/openstack-galera-0" Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.693101 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vw97g\" (UniqueName: \"kubernetes.io/projected/5f6fa285-4374-4be5-b4cf-e3dd8ef56762-kube-api-access-vw97g\") pod \"openstack-galera-0\" (UID: \"5f6fa285-4374-4be5-b4cf-e3dd8ef56762\") " pod="openstack/openstack-galera-0" Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.693180 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5f6fa285-4374-4be5-b4cf-e3dd8ef56762-config-data-generated\") pod \"openstack-galera-0\" (UID: \"5f6fa285-4374-4be5-b4cf-e3dd8ef56762\") " pod="openstack/openstack-galera-0" Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.693207 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5f6fa285-4374-4be5-b4cf-e3dd8ef56762-kolla-config\") pod \"openstack-galera-0\" (UID: \"5f6fa285-4374-4be5-b4cf-e3dd8ef56762\") " pod="openstack/openstack-galera-0" Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.693225 4708 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-0\" (UID: \"5f6fa285-4374-4be5-b4cf-e3dd8ef56762\") " pod="openstack/openstack-galera-0" Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.693253 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f6fa285-4374-4be5-b4cf-e3dd8ef56762-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"5f6fa285-4374-4be5-b4cf-e3dd8ef56762\") " pod="openstack/openstack-galera-0" Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.693280 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5f6fa285-4374-4be5-b4cf-e3dd8ef56762-config-data-default\") pod \"openstack-galera-0\" (UID: \"5f6fa285-4374-4be5-b4cf-e3dd8ef56762\") " pod="openstack/openstack-galera-0" Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.693305 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f6fa285-4374-4be5-b4cf-e3dd8ef56762-operator-scripts\") pod \"openstack-galera-0\" (UID: \"5f6fa285-4374-4be5-b4cf-e3dd8ef56762\") " pod="openstack/openstack-galera-0" Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.693325 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f6fa285-4374-4be5-b4cf-e3dd8ef56762-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"5f6fa285-4374-4be5-b4cf-e3dd8ef56762\") " pod="openstack/openstack-galera-0" Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.693553 4708 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-0\" (UID: \"5f6fa285-4374-4be5-b4cf-e3dd8ef56762\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/openstack-galera-0" Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.694350 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5f6fa285-4374-4be5-b4cf-e3dd8ef56762-kolla-config\") pod \"openstack-galera-0\" (UID: \"5f6fa285-4374-4be5-b4cf-e3dd8ef56762\") " pod="openstack/openstack-galera-0" Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.695363 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5f6fa285-4374-4be5-b4cf-e3dd8ef56762-config-data-generated\") pod \"openstack-galera-0\" (UID: \"5f6fa285-4374-4be5-b4cf-e3dd8ef56762\") " pod="openstack/openstack-galera-0" Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.695494 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f6fa285-4374-4be5-b4cf-e3dd8ef56762-operator-scripts\") pod \"openstack-galera-0\" (UID: \"5f6fa285-4374-4be5-b4cf-e3dd8ef56762\") " pod="openstack/openstack-galera-0" Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.696661 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5f6fa285-4374-4be5-b4cf-e3dd8ef56762-config-data-default\") pod \"openstack-galera-0\" (UID: 
\"5f6fa285-4374-4be5-b4cf-e3dd8ef56762\") " pod="openstack/openstack-galera-0" Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.701445 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f6fa285-4374-4be5-b4cf-e3dd8ef56762-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"5f6fa285-4374-4be5-b4cf-e3dd8ef56762\") " pod="openstack/openstack-galera-0" Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.703177 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f6fa285-4374-4be5-b4cf-e3dd8ef56762-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"5f6fa285-4374-4be5-b4cf-e3dd8ef56762\") " pod="openstack/openstack-galera-0" Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.710536 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vw97g\" (UniqueName: \"kubernetes.io/projected/5f6fa285-4374-4be5-b4cf-e3dd8ef56762-kube-api-access-vw97g\") pod \"openstack-galera-0\" (UID: \"5f6fa285-4374-4be5-b4cf-e3dd8ef56762\") " pod="openstack/openstack-galera-0" Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.721435 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-0\" (UID: \"5f6fa285-4374-4be5-b4cf-e3dd8ef56762\") " pod="openstack/openstack-galera-0" Feb 03 07:25:57 crc kubenswrapper[4708]: I0203 07:25:57.791152 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Feb 03 07:25:58 crc kubenswrapper[4708]: I0203 07:25:58.355464 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-pxgzt"] Feb 03 07:25:58 crc kubenswrapper[4708]: I0203 07:25:58.357148 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pxgzt" Feb 03 07:25:58 crc kubenswrapper[4708]: I0203 07:25:58.361867 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pxgzt"] Feb 03 07:25:58 crc kubenswrapper[4708]: I0203 07:25:58.504396 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/271e64f1-20a8-42ad-962a-0b498d561cdd-catalog-content\") pod \"redhat-operators-pxgzt\" (UID: \"271e64f1-20a8-42ad-962a-0b498d561cdd\") " pod="openshift-marketplace/redhat-operators-pxgzt" Feb 03 07:25:58 crc kubenswrapper[4708]: I0203 07:25:58.504467 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/271e64f1-20a8-42ad-962a-0b498d561cdd-utilities\") pod \"redhat-operators-pxgzt\" (UID: \"271e64f1-20a8-42ad-962a-0b498d561cdd\") " pod="openshift-marketplace/redhat-operators-pxgzt" Feb 03 07:25:58 crc kubenswrapper[4708]: I0203 07:25:58.504502 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8bf4\" (UniqueName: \"kubernetes.io/projected/271e64f1-20a8-42ad-962a-0b498d561cdd-kube-api-access-r8bf4\") pod \"redhat-operators-pxgzt\" (UID: \"271e64f1-20a8-42ad-962a-0b498d561cdd\") " pod="openshift-marketplace/redhat-operators-pxgzt" Feb 03 07:25:58 crc kubenswrapper[4708]: I0203 07:25:58.605858 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/271e64f1-20a8-42ad-962a-0b498d561cdd-catalog-content\") pod \"redhat-operators-pxgzt\" (UID: \"271e64f1-20a8-42ad-962a-0b498d561cdd\") " pod="openshift-marketplace/redhat-operators-pxgzt" Feb 03 07:25:58 crc kubenswrapper[4708]: I0203 07:25:58.605911 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/271e64f1-20a8-42ad-962a-0b498d561cdd-utilities\") pod \"redhat-operators-pxgzt\" (UID: \"271e64f1-20a8-42ad-962a-0b498d561cdd\") " pod="openshift-marketplace/redhat-operators-pxgzt" Feb 03 07:25:58 crc kubenswrapper[4708]: I0203 07:25:58.605938 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8bf4\" (UniqueName: \"kubernetes.io/projected/271e64f1-20a8-42ad-962a-0b498d561cdd-kube-api-access-r8bf4\") pod \"redhat-operators-pxgzt\" (UID: \"271e64f1-20a8-42ad-962a-0b498d561cdd\") " pod="openshift-marketplace/redhat-operators-pxgzt" Feb 03 07:25:58 crc kubenswrapper[4708]: I0203 07:25:58.606425 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/271e64f1-20a8-42ad-962a-0b498d561cdd-catalog-content\") pod \"redhat-operators-pxgzt\" (UID: \"271e64f1-20a8-42ad-962a-0b498d561cdd\") " pod="openshift-marketplace/redhat-operators-pxgzt" Feb 03 07:25:58 crc kubenswrapper[4708]: I0203 07:25:58.606532 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/271e64f1-20a8-42ad-962a-0b498d561cdd-utilities\") pod \"redhat-operators-pxgzt\" (UID: \"271e64f1-20a8-42ad-962a-0b498d561cdd\") " pod="openshift-marketplace/redhat-operators-pxgzt" Feb 03 07:25:58 crc kubenswrapper[4708]: I0203 07:25:58.622617 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-r8bf4\" (UniqueName: \"kubernetes.io/projected/271e64f1-20a8-42ad-962a-0b498d561cdd-kube-api-access-r8bf4\") pod \"redhat-operators-pxgzt\" (UID: \"271e64f1-20a8-42ad-962a-0b498d561cdd\") " pod="openshift-marketplace/redhat-operators-pxgzt" Feb 03 07:25:58 crc kubenswrapper[4708]: I0203 07:25:58.685740 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pxgzt" Feb 03 07:25:58 crc kubenswrapper[4708]: I0203 07:25:58.954159 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 03 07:25:58 crc kubenswrapper[4708]: I0203 07:25:58.955676 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Feb 03 07:25:58 crc kubenswrapper[4708]: I0203 07:25:58.957700 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Feb 03 07:25:58 crc kubenswrapper[4708]: I0203 07:25:58.958531 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-9smzd" Feb 03 07:25:58 crc kubenswrapper[4708]: I0203 07:25:58.966451 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Feb 03 07:25:58 crc kubenswrapper[4708]: I0203 07:25:58.970741 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.003075 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.113537 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gs9g7\" (UniqueName: \"kubernetes.io/projected/8484d145-abd4-4112-b81c-338bf4d9285f-kube-api-access-gs9g7\") pod \"openstack-cell1-galera-0\" (UID: \"8484d145-abd4-4112-b81c-338bf4d9285f\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.113836 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8484d145-abd4-4112-b81c-338bf4d9285f-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"8484d145-abd4-4112-b81c-338bf4d9285f\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.113932 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8484d145-abd4-4112-b81c-338bf4d9285f-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"8484d145-abd4-4112-b81c-338bf4d9285f\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.114037 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-cell1-galera-0\" (UID: \"8484d145-abd4-4112-b81c-338bf4d9285f\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.114127 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8484d145-abd4-4112-b81c-338bf4d9285f-kolla-config\") pod \"openstack-cell1-galera-0\" 
(UID: \"8484d145-abd4-4112-b81c-338bf4d9285f\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.114202 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8484d145-abd4-4112-b81c-338bf4d9285f-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"8484d145-abd4-4112-b81c-338bf4d9285f\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.114303 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8484d145-abd4-4112-b81c-338bf4d9285f-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"8484d145-abd4-4112-b81c-338bf4d9285f\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.114374 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8484d145-abd4-4112-b81c-338bf4d9285f-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"8484d145-abd4-4112-b81c-338bf4d9285f\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.190774 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.191689 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.194043 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-7jt9d" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.194295 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.194445 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.212263 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.215448 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-cell1-galera-0\" (UID: \"8484d145-abd4-4112-b81c-338bf4d9285f\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.215510 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8484d145-abd4-4112-b81c-338bf4d9285f-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"8484d145-abd4-4112-b81c-338bf4d9285f\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.215548 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8484d145-abd4-4112-b81c-338bf4d9285f-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"8484d145-abd4-4112-b81c-338bf4d9285f\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.215595 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8484d145-abd4-4112-b81c-338bf4d9285f-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"8484d145-abd4-4112-b81c-338bf4d9285f\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.215612 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8484d145-abd4-4112-b81c-338bf4d9285f-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"8484d145-abd4-4112-b81c-338bf4d9285f\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.215656 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gs9g7\" (UniqueName: \"kubernetes.io/projected/8484d145-abd4-4112-b81c-338bf4d9285f-kube-api-access-gs9g7\") pod \"openstack-cell1-galera-0\" (UID: \"8484d145-abd4-4112-b81c-338bf4d9285f\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.215736 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8484d145-abd4-4112-b81c-338bf4d9285f-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"8484d145-abd4-4112-b81c-338bf4d9285f\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.215755 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8484d145-abd4-4112-b81c-338bf4d9285f-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"8484d145-abd4-4112-b81c-338bf4d9285f\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.216673 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8484d145-abd4-4112-b81c-338bf4d9285f-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"8484d145-abd4-4112-b81c-338bf4d9285f\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.216762 4708 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-cell1-galera-0\" (UID: \"8484d145-abd4-4112-b81c-338bf4d9285f\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/openstack-cell1-galera-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.217091 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8484d145-abd4-4112-b81c-338bf4d9285f-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"8484d145-abd4-4112-b81c-338bf4d9285f\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.217096 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8484d145-abd4-4112-b81c-338bf4d9285f-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"8484d145-abd4-4112-b81c-338bf4d9285f\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.217444 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/8484d145-abd4-4112-b81c-338bf4d9285f-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"8484d145-abd4-4112-b81c-338bf4d9285f\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.220828 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8484d145-abd4-4112-b81c-338bf4d9285f-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"8484d145-abd4-4112-b81c-338bf4d9285f\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.221430 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8484d145-abd4-4112-b81c-338bf4d9285f-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"8484d145-abd4-4112-b81c-338bf4d9285f\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.269940 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-cell1-galera-0\" (UID: \"8484d145-abd4-4112-b81c-338bf4d9285f\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.272565 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gs9g7\" (UniqueName: \"kubernetes.io/projected/8484d145-abd4-4112-b81c-338bf4d9285f-kube-api-access-gs9g7\") pod \"openstack-cell1-galera-0\" (UID: \"8484d145-abd4-4112-b81c-338bf4d9285f\") " pod="openstack/openstack-cell1-galera-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.284969 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.321773 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4g7p8\" (UniqueName: \"kubernetes.io/projected/98eee8d5-f15e-4add-86d3-d19f15018230-kube-api-access-4g7p8\") pod \"memcached-0\" (UID: \"98eee8d5-f15e-4add-86d3-d19f15018230\") " pod="openstack/memcached-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.322121 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98eee8d5-f15e-4add-86d3-d19f15018230-combined-ca-bundle\") pod \"memcached-0\" (UID: \"98eee8d5-f15e-4add-86d3-d19f15018230\") " pod="openstack/memcached-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.322269 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/98eee8d5-f15e-4add-86d3-d19f15018230-config-data\") pod \"memcached-0\" (UID: \"98eee8d5-f15e-4add-86d3-d19f15018230\") " pod="openstack/memcached-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.322393 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/98eee8d5-f15e-4add-86d3-d19f15018230-kolla-config\") pod \"memcached-0\" (UID: \"98eee8d5-f15e-4add-86d3-d19f15018230\") " pod="openstack/memcached-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.322510 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/98eee8d5-f15e-4add-86d3-d19f15018230-memcached-tls-certs\") pod \"memcached-0\" (UID: \"98eee8d5-f15e-4add-86d3-d19f15018230\") " pod="openstack/memcached-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.423949 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4g7p8\" (UniqueName: \"kubernetes.io/projected/98eee8d5-f15e-4add-86d3-d19f15018230-kube-api-access-4g7p8\") pod \"memcached-0\" (UID: \"98eee8d5-f15e-4add-86d3-d19f15018230\") " pod="openstack/memcached-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.424222 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98eee8d5-f15e-4add-86d3-d19f15018230-combined-ca-bundle\") pod \"memcached-0\" (UID: \"98eee8d5-f15e-4add-86d3-d19f15018230\") " pod="openstack/memcached-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.424261 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/98eee8d5-f15e-4add-86d3-d19f15018230-config-data\") pod \"memcached-0\" (UID: \"98eee8d5-f15e-4add-86d3-d19f15018230\") " pod="openstack/memcached-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.424336 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/98eee8d5-f15e-4add-86d3-d19f15018230-kolla-config\") pod \"memcached-0\" (UID: \"98eee8d5-f15e-4add-86d3-d19f15018230\") " pod="openstack/memcached-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.424387 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/98eee8d5-f15e-4add-86d3-d19f15018230-memcached-tls-certs\") pod \"memcached-0\" (UID: \"98eee8d5-f15e-4add-86d3-d19f15018230\") " pod="openstack/memcached-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.424968 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/98eee8d5-f15e-4add-86d3-d19f15018230-kolla-config\") pod \"memcached-0\" (UID: \"98eee8d5-f15e-4add-86d3-d19f15018230\") " pod="openstack/memcached-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.425136 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/98eee8d5-f15e-4add-86d3-d19f15018230-config-data\") pod \"memcached-0\" (UID: \"98eee8d5-f15e-4add-86d3-d19f15018230\") " pod="openstack/memcached-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.429161 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98eee8d5-f15e-4add-86d3-d19f15018230-combined-ca-bundle\") pod \"memcached-0\" (UID: \"98eee8d5-f15e-4add-86d3-d19f15018230\") " pod="openstack/memcached-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.429182 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/98eee8d5-f15e-4add-86d3-d19f15018230-memcached-tls-certs\") pod \"memcached-0\" (UID: \"98eee8d5-f15e-4add-86d3-d19f15018230\") " pod="openstack/memcached-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.443294 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4g7p8\" (UniqueName: \"kubernetes.io/projected/98eee8d5-f15e-4add-86d3-d19f15018230-kube-api-access-4g7p8\") pod \"memcached-0\" (UID: \"98eee8d5-f15e-4add-86d3-d19f15018230\") " pod="openstack/memcached-0" Feb 03 07:25:59 crc kubenswrapper[4708]: I0203 07:25:59.563360 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Feb 03 07:26:01 crc kubenswrapper[4708]: I0203 07:26:01.048730 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Feb 03 07:26:01 crc kubenswrapper[4708]: I0203 07:26:01.049917 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 03 07:26:01 crc kubenswrapper[4708]: I0203 07:26:01.052401 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-rsjj2" Feb 03 07:26:01 crc kubenswrapper[4708]: I0203 07:26:01.062263 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 03 07:26:01 crc kubenswrapper[4708]: I0203 07:26:01.183216 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mx6p5\" (UniqueName: \"kubernetes.io/projected/d79b40f9-049c-46ea-8ade-f43e58bc8cd4-kube-api-access-mx6p5\") pod \"kube-state-metrics-0\" (UID: \"d79b40f9-049c-46ea-8ade-f43e58bc8cd4\") " pod="openstack/kube-state-metrics-0" Feb 03 07:26:01 crc kubenswrapper[4708]: I0203 07:26:01.284500 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mx6p5\" (UniqueName: \"kubernetes.io/projected/d79b40f9-049c-46ea-8ade-f43e58bc8cd4-kube-api-access-mx6p5\") pod \"kube-state-metrics-0\" (UID: \"d79b40f9-049c-46ea-8ade-f43e58bc8cd4\") " pod="openstack/kube-state-metrics-0" Feb 03 07:26:01 crc kubenswrapper[4708]: I0203 07:26:01.302950 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mx6p5\" (UniqueName: \"kubernetes.io/projected/d79b40f9-049c-46ea-8ade-f43e58bc8cd4-kube-api-access-mx6p5\") pod \"kube-state-metrics-0\" (UID: \"d79b40f9-049c-46ea-8ade-f43e58bc8cd4\") " pod="openstack/kube-state-metrics-0" Feb 03 07:26:01 crc kubenswrapper[4708]: I0203 07:26:01.368664 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 03 07:26:01 crc kubenswrapper[4708]: I0203 07:26:01.729275 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-ppsk2"] Feb 03 07:26:01 crc kubenswrapper[4708]: I0203 07:26:01.731251 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ppsk2" Feb 03 07:26:01 crc kubenswrapper[4708]: I0203 07:26:01.749081 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ppsk2"] Feb 03 07:26:01 crc kubenswrapper[4708]: I0203 07:26:01.893898 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f490b43-b9f5-4e99-89a1-e7c75a45b487-catalog-content\") pod \"certified-operators-ppsk2\" (UID: \"0f490b43-b9f5-4e99-89a1-e7c75a45b487\") " pod="openshift-marketplace/certified-operators-ppsk2" Feb 03 07:26:01 crc kubenswrapper[4708]: I0203 07:26:01.894261 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtkwj\" (UniqueName: \"kubernetes.io/projected/0f490b43-b9f5-4e99-89a1-e7c75a45b487-kube-api-access-vtkwj\") pod \"certified-operators-ppsk2\" (UID: \"0f490b43-b9f5-4e99-89a1-e7c75a45b487\") " pod="openshift-marketplace/certified-operators-ppsk2" Feb 03 07:26:01 crc kubenswrapper[4708]: I0203 07:26:01.894299 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f490b43-b9f5-4e99-89a1-e7c75a45b487-utilities\") pod \"certified-operators-ppsk2\" (UID: \"0f490b43-b9f5-4e99-89a1-e7c75a45b487\") " pod="openshift-marketplace/certified-operators-ppsk2" Feb 03 07:26:01 crc kubenswrapper[4708]: I0203 07:26:01.995294 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtkwj\" (UniqueName: \"kubernetes.io/projected/0f490b43-b9f5-4e99-89a1-e7c75a45b487-kube-api-access-vtkwj\") pod \"certified-operators-ppsk2\" (UID: \"0f490b43-b9f5-4e99-89a1-e7c75a45b487\") " pod="openshift-marketplace/certified-operators-ppsk2" Feb 03 07:26:01 crc kubenswrapper[4708]: I0203 07:26:01.995337 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f490b43-b9f5-4e99-89a1-e7c75a45b487-utilities\") pod \"certified-operators-ppsk2\" (UID: \"0f490b43-b9f5-4e99-89a1-e7c75a45b487\") " pod="openshift-marketplace/certified-operators-ppsk2" Feb 03 07:26:01 crc kubenswrapper[4708]: I0203 07:26:01.995448 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f490b43-b9f5-4e99-89a1-e7c75a45b487-catalog-content\") pod \"certified-operators-ppsk2\" (UID: \"0f490b43-b9f5-4e99-89a1-e7c75a45b487\") " pod="openshift-marketplace/certified-operators-ppsk2" Feb 03 07:26:01 crc kubenswrapper[4708]: I0203 07:26:01.995980 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f490b43-b9f5-4e99-89a1-e7c75a45b487-catalog-content\") pod \"certified-operators-ppsk2\" (UID: \"0f490b43-b9f5-4e99-89a1-e7c75a45b487\") " pod="openshift-marketplace/certified-operators-ppsk2" Feb 03 07:26:01 crc kubenswrapper[4708]: I0203 07:26:01.996038 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f490b43-b9f5-4e99-89a1-e7c75a45b487-utilities\") pod \"certified-operators-ppsk2\" (UID: \"0f490b43-b9f5-4e99-89a1-e7c75a45b487\") " pod="openshift-marketplace/certified-operators-ppsk2" Feb 03 07:26:02 crc kubenswrapper[4708]: I0203 07:26:02.013473 4708 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-vtkwj\" (UniqueName: \"kubernetes.io/projected/0f490b43-b9f5-4e99-89a1-e7c75a45b487-kube-api-access-vtkwj\") pod \"certified-operators-ppsk2\" (UID: \"0f490b43-b9f5-4e99-89a1-e7c75a45b487\") " pod="openshift-marketplace/certified-operators-ppsk2" Feb 03 07:26:02 crc kubenswrapper[4708]: I0203 07:26:02.055046 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ppsk2" Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.201419 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-pb4xp"] Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.202882 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-pb4xp" Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.206137 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.206189 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-jjl8j" Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.206424 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.225294 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-48bcs"] Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.227473 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-48bcs" Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.231422 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-pb4xp"] Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.239272 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-48bcs"] Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.328922 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024-scripts\") pod \"ovn-controller-ovs-48bcs\" (UID: \"6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024\") " pod="openstack/ovn-controller-ovs-48bcs" Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.328997 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3b5a2d58-5ebb-4838-a798-bc280fe99951-var-run\") pod \"ovn-controller-pb4xp\" (UID: \"3b5a2d58-5ebb-4838-a798-bc280fe99951\") " pod="openstack/ovn-controller-pb4xp" Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.329032 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3b5a2d58-5ebb-4838-a798-bc280fe99951-scripts\") pod \"ovn-controller-pb4xp\" (UID: \"3b5a2d58-5ebb-4838-a798-bc280fe99951\") " pod="openstack/ovn-controller-pb4xp" Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.329059 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pz2mk\" (UniqueName: \"kubernetes.io/projected/3b5a2d58-5ebb-4838-a798-bc280fe99951-kube-api-access-pz2mk\") pod \"ovn-controller-pb4xp\" (UID: \"3b5a2d58-5ebb-4838-a798-bc280fe99951\") " pod="openstack/ovn-controller-pb4xp" 
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.329234 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5a2d58-5ebb-4838-a798-bc280fe99951-combined-ca-bundle\") pod \"ovn-controller-pb4xp\" (UID: \"3b5a2d58-5ebb-4838-a798-bc280fe99951\") " pod="openstack/ovn-controller-pb4xp"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.329345 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/3b5a2d58-5ebb-4838-a798-bc280fe99951-var-run-ovn\") pod \"ovn-controller-pb4xp\" (UID: \"3b5a2d58-5ebb-4838-a798-bc280fe99951\") " pod="openstack/ovn-controller-pb4xp"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.329439 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024-var-log\") pod \"ovn-controller-ovs-48bcs\" (UID: \"6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024\") " pod="openstack/ovn-controller-ovs-48bcs"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.329460 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d42kc\" (UniqueName: \"kubernetes.io/projected/6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024-kube-api-access-d42kc\") pod \"ovn-controller-ovs-48bcs\" (UID: \"6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024\") " pod="openstack/ovn-controller-ovs-48bcs"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.329532 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024-etc-ovs\") pod \"ovn-controller-ovs-48bcs\" (UID: \"6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024\") " pod="openstack/ovn-controller-ovs-48bcs"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.329582 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024-var-lib\") pod \"ovn-controller-ovs-48bcs\" (UID: \"6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024\") " pod="openstack/ovn-controller-ovs-48bcs"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.329604 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/3b5a2d58-5ebb-4838-a798-bc280fe99951-var-log-ovn\") pod \"ovn-controller-pb4xp\" (UID: \"3b5a2d58-5ebb-4838-a798-bc280fe99951\") " pod="openstack/ovn-controller-pb4xp"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.329630 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024-var-run\") pod \"ovn-controller-ovs-48bcs\" (UID: \"6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024\") " pod="openstack/ovn-controller-ovs-48bcs"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.329698 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b5a2d58-5ebb-4838-a798-bc280fe99951-ovn-controller-tls-certs\") pod \"ovn-controller-pb4xp\" (UID: \"3b5a2d58-5ebb-4838-a798-bc280fe99951\") " pod="openstack/ovn-controller-pb4xp"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.436119 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024-etc-ovs\") pod \"ovn-controller-ovs-48bcs\" (UID: \"6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024\") " pod="openstack/ovn-controller-ovs-48bcs"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.436189 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024-var-lib\") pod \"ovn-controller-ovs-48bcs\" (UID: \"6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024\") " pod="openstack/ovn-controller-ovs-48bcs"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.436559 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/3b5a2d58-5ebb-4838-a798-bc280fe99951-var-log-ovn\") pod \"ovn-controller-pb4xp\" (UID: \"3b5a2d58-5ebb-4838-a798-bc280fe99951\") " pod="openstack/ovn-controller-pb4xp"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.436769 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024-var-lib\") pod \"ovn-controller-ovs-48bcs\" (UID: \"6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024\") " pod="openstack/ovn-controller-ovs-48bcs"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.436883 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/3b5a2d58-5ebb-4838-a798-bc280fe99951-var-log-ovn\") pod \"ovn-controller-pb4xp\" (UID: \"3b5a2d58-5ebb-4838-a798-bc280fe99951\") " pod="openstack/ovn-controller-pb4xp"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.437049 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024-etc-ovs\") pod \"ovn-controller-ovs-48bcs\" (UID: \"6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024\") " pod="openstack/ovn-controller-ovs-48bcs"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.437071 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024-var-run\") pod \"ovn-controller-ovs-48bcs\" (UID: \"6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024\") " pod="openstack/ovn-controller-ovs-48bcs"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.436585 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024-var-run\") pod \"ovn-controller-ovs-48bcs\" (UID: \"6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024\") " pod="openstack/ovn-controller-ovs-48bcs"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.437206 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b5a2d58-5ebb-4838-a798-bc280fe99951-ovn-controller-tls-certs\") pod \"ovn-controller-pb4xp\" (UID: \"3b5a2d58-5ebb-4838-a798-bc280fe99951\") " pod="openstack/ovn-controller-pb4xp"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.437293 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024-scripts\") pod \"ovn-controller-ovs-48bcs\" (UID: \"6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024\") " pod="openstack/ovn-controller-ovs-48bcs"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.437350 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3b5a2d58-5ebb-4838-a798-bc280fe99951-var-run\") pod \"ovn-controller-pb4xp\" (UID: \"3b5a2d58-5ebb-4838-a798-bc280fe99951\") " pod="openstack/ovn-controller-pb4xp"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.437376 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3b5a2d58-5ebb-4838-a798-bc280fe99951-scripts\") pod \"ovn-controller-pb4xp\" (UID: \"3b5a2d58-5ebb-4838-a798-bc280fe99951\") " pod="openstack/ovn-controller-pb4xp"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.437401 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pz2mk\" (UniqueName: \"kubernetes.io/projected/3b5a2d58-5ebb-4838-a798-bc280fe99951-kube-api-access-pz2mk\") pod \"ovn-controller-pb4xp\" (UID: \"3b5a2d58-5ebb-4838-a798-bc280fe99951\") " pod="openstack/ovn-controller-pb4xp"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.437443 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5a2d58-5ebb-4838-a798-bc280fe99951-combined-ca-bundle\") pod \"ovn-controller-pb4xp\" (UID: \"3b5a2d58-5ebb-4838-a798-bc280fe99951\") " pod="openstack/ovn-controller-pb4xp"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.437505 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/3b5a2d58-5ebb-4838-a798-bc280fe99951-var-run-ovn\") pod \"ovn-controller-pb4xp\" (UID: \"3b5a2d58-5ebb-4838-a798-bc280fe99951\") " pod="openstack/ovn-controller-pb4xp"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.437565 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024-var-log\") pod \"ovn-controller-ovs-48bcs\" (UID: \"6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024\") " pod="openstack/ovn-controller-ovs-48bcs"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.437583 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d42kc\" (UniqueName: \"kubernetes.io/projected/6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024-kube-api-access-d42kc\") pod \"ovn-controller-ovs-48bcs\" (UID: \"6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024\") " pod="openstack/ovn-controller-ovs-48bcs"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.437940 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/3b5a2d58-5ebb-4838-a798-bc280fe99951-var-run-ovn\") pod \"ovn-controller-pb4xp\" (UID: \"3b5a2d58-5ebb-4838-a798-bc280fe99951\") " pod="openstack/ovn-controller-pb4xp"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.437983 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024-var-log\") pod \"ovn-controller-ovs-48bcs\" (UID: \"6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024\") " pod="openstack/ovn-controller-ovs-48bcs"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.438027 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3b5a2d58-5ebb-4838-a798-bc280fe99951-var-run\") pod \"ovn-controller-pb4xp\" (UID: \"3b5a2d58-5ebb-4838-a798-bc280fe99951\") " pod="openstack/ovn-controller-pb4xp"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.440007 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024-scripts\") pod \"ovn-controller-ovs-48bcs\" (UID: \"6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024\") " pod="openstack/ovn-controller-ovs-48bcs"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.441193 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3b5a2d58-5ebb-4838-a798-bc280fe99951-scripts\") pod \"ovn-controller-pb4xp\" (UID: \"3b5a2d58-5ebb-4838-a798-bc280fe99951\") " pod="openstack/ovn-controller-pb4xp"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.448272 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5a2d58-5ebb-4838-a798-bc280fe99951-combined-ca-bundle\") pod \"ovn-controller-pb4xp\" (UID: \"3b5a2d58-5ebb-4838-a798-bc280fe99951\") " pod="openstack/ovn-controller-pb4xp"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.453665 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b5a2d58-5ebb-4838-a798-bc280fe99951-ovn-controller-tls-certs\") pod \"ovn-controller-pb4xp\" (UID: \"3b5a2d58-5ebb-4838-a798-bc280fe99951\") " pod="openstack/ovn-controller-pb4xp"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.454981 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pz2mk\" (UniqueName: \"kubernetes.io/projected/3b5a2d58-5ebb-4838-a798-bc280fe99951-kube-api-access-pz2mk\") pod \"ovn-controller-pb4xp\" (UID: \"3b5a2d58-5ebb-4838-a798-bc280fe99951\") " pod="openstack/ovn-controller-pb4xp"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.455413 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d42kc\" (UniqueName: \"kubernetes.io/projected/6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024-kube-api-access-d42kc\") pod \"ovn-controller-ovs-48bcs\" (UID: \"6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024\") " pod="openstack/ovn-controller-ovs-48bcs"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.533207 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-pb4xp"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.551043 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-48bcs"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.732359 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4hkfj"]
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.735427 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4hkfj"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.737934 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4hkfj"]
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.843118 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bd2bn\" (UniqueName: \"kubernetes.io/projected/69f5f6ed-0270-407f-9b23-68c954638cb1-kube-api-access-bd2bn\") pod \"community-operators-4hkfj\" (UID: \"69f5f6ed-0270-407f-9b23-68c954638cb1\") " pod="openshift-marketplace/community-operators-4hkfj"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.843156 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69f5f6ed-0270-407f-9b23-68c954638cb1-utilities\") pod \"community-operators-4hkfj\" (UID: \"69f5f6ed-0270-407f-9b23-68c954638cb1\") " pod="openshift-marketplace/community-operators-4hkfj"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.843221 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69f5f6ed-0270-407f-9b23-68c954638cb1-catalog-content\") pod \"community-operators-4hkfj\" (UID: \"69f5f6ed-0270-407f-9b23-68c954638cb1\") " pod="openshift-marketplace/community-operators-4hkfj"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.944988 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bd2bn\" (UniqueName: \"kubernetes.io/projected/69f5f6ed-0270-407f-9b23-68c954638cb1-kube-api-access-bd2bn\") pod \"community-operators-4hkfj\" (UID: \"69f5f6ed-0270-407f-9b23-68c954638cb1\") " pod="openshift-marketplace/community-operators-4hkfj"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.945045 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69f5f6ed-0270-407f-9b23-68c954638cb1-utilities\") pod \"community-operators-4hkfj\" (UID: \"69f5f6ed-0270-407f-9b23-68c954638cb1\") " pod="openshift-marketplace/community-operators-4hkfj"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.945128 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69f5f6ed-0270-407f-9b23-68c954638cb1-catalog-content\") pod \"community-operators-4hkfj\" (UID: \"69f5f6ed-0270-407f-9b23-68c954638cb1\") " pod="openshift-marketplace/community-operators-4hkfj"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.945851 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69f5f6ed-0270-407f-9b23-68c954638cb1-utilities\") pod \"community-operators-4hkfj\" (UID: \"69f5f6ed-0270-407f-9b23-68c954638cb1\") " pod="openshift-marketplace/community-operators-4hkfj"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.945900 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69f5f6ed-0270-407f-9b23-68c954638cb1-catalog-content\") pod \"community-operators-4hkfj\" (UID: \"69f5f6ed-0270-407f-9b23-68c954638cb1\") " pod="openshift-marketplace/community-operators-4hkfj"
Feb 03 07:26:04 crc kubenswrapper[4708]: I0203 07:26:04.965965 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bd2bn\" (UniqueName: \"kubernetes.io/projected/69f5f6ed-0270-407f-9b23-68c954638cb1-kube-api-access-bd2bn\") pod \"community-operators-4hkfj\" (UID: \"69f5f6ed-0270-407f-9b23-68c954638cb1\") " pod="openshift-marketplace/community-operators-4hkfj"
Feb 03 07:26:05 crc kubenswrapper[4708]: I0203 07:26:05.052406 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4hkfj"
Feb 03 07:26:06 crc kubenswrapper[4708]: I0203 07:26:06.767683 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"]
Feb 03 07:26:06 crc kubenswrapper[4708]: I0203 07:26:06.769939 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Feb 03 07:26:06 crc kubenswrapper[4708]: I0203 07:26:06.776488 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config"
Feb 03 07:26:06 crc kubenswrapper[4708]: I0203 07:26:06.776889 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs"
Feb 03 07:26:06 crc kubenswrapper[4708]: I0203 07:26:06.777025 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics"
Feb 03 07:26:06 crc kubenswrapper[4708]: I0203 07:26:06.777146 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts"
Feb 03 07:26:06 crc kubenswrapper[4708]: I0203 07:26:06.777260 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-wz8rl"
Feb 03 07:26:06 crc kubenswrapper[4708]: I0203 07:26:06.788986 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Feb 03 07:26:06 crc kubenswrapper[4708]: I0203 07:26:06.881466 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c70c9bb-deb5-45aa-96e6-aea4e711f93a-config\") pod \"ovsdbserver-nb-0\" (UID: \"6c70c9bb-deb5-45aa-96e6-aea4e711f93a\") " pod="openstack/ovsdbserver-nb-0"
Feb 03 07:26:06 crc kubenswrapper[4708]: I0203 07:26:06.881524 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c70c9bb-deb5-45aa-96e6-aea4e711f93a-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"6c70c9bb-deb5-45aa-96e6-aea4e711f93a\") " pod="openstack/ovsdbserver-nb-0"
Feb 03 07:26:06 crc kubenswrapper[4708]: I0203 07:26:06.881724 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c70c9bb-deb5-45aa-96e6-aea4e711f93a-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"6c70c9bb-deb5-45aa-96e6-aea4e711f93a\") " pod="openstack/ovsdbserver-nb-0"
Feb 03 07:26:06 crc kubenswrapper[4708]: I0203 07:26:06.881974 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6c70c9bb-deb5-45aa-96e6-aea4e711f93a-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"6c70c9bb-deb5-45aa-96e6-aea4e711f93a\") " pod="openstack/ovsdbserver-nb-0"
Feb 03 07:26:06 crc kubenswrapper[4708]: I0203 07:26:06.882004 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\"
(UniqueName: \"kubernetes.io/configmap/6c70c9bb-deb5-45aa-96e6-aea4e711f93a-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"6c70c9bb-deb5-45aa-96e6-aea4e711f93a\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:26:06 crc kubenswrapper[4708]: I0203 07:26:06.882092 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c70c9bb-deb5-45aa-96e6-aea4e711f93a-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"6c70c9bb-deb5-45aa-96e6-aea4e711f93a\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:26:06 crc kubenswrapper[4708]: I0203 07:26:06.882197 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hjjx\" (UniqueName: \"kubernetes.io/projected/6c70c9bb-deb5-45aa-96e6-aea4e711f93a-kube-api-access-8hjjx\") pod \"ovsdbserver-nb-0\" (UID: \"6c70c9bb-deb5-45aa-96e6-aea4e711f93a\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:26:06 crc kubenswrapper[4708]: I0203 07:26:06.882234 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"6c70c9bb-deb5-45aa-96e6-aea4e711f93a\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:26:06 crc kubenswrapper[4708]: I0203 07:26:06.983995 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6c70c9bb-deb5-45aa-96e6-aea4e711f93a-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"6c70c9bb-deb5-45aa-96e6-aea4e711f93a\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:26:06 crc kubenswrapper[4708]: I0203 07:26:06.984043 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6c70c9bb-deb5-45aa-96e6-aea4e711f93a-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"6c70c9bb-deb5-45aa-96e6-aea4e711f93a\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:26:06 crc kubenswrapper[4708]: I0203 07:26:06.984086 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c70c9bb-deb5-45aa-96e6-aea4e711f93a-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"6c70c9bb-deb5-45aa-96e6-aea4e711f93a\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:26:06 crc kubenswrapper[4708]: I0203 07:26:06.984147 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hjjx\" (UniqueName: \"kubernetes.io/projected/6c70c9bb-deb5-45aa-96e6-aea4e711f93a-kube-api-access-8hjjx\") pod \"ovsdbserver-nb-0\" (UID: \"6c70c9bb-deb5-45aa-96e6-aea4e711f93a\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:26:06 crc kubenswrapper[4708]: I0203 07:26:06.984174 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"6c70c9bb-deb5-45aa-96e6-aea4e711f93a\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:26:06 crc kubenswrapper[4708]: I0203 07:26:06.984205 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c70c9bb-deb5-45aa-96e6-aea4e711f93a-config\") pod \"ovsdbserver-nb-0\" (UID: \"6c70c9bb-deb5-45aa-96e6-aea4e711f93a\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:26:06 crc kubenswrapper[4708]: 
I0203 07:26:06.984239 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c70c9bb-deb5-45aa-96e6-aea4e711f93a-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"6c70c9bb-deb5-45aa-96e6-aea4e711f93a\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:26:06 crc kubenswrapper[4708]: I0203 07:26:06.984276 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c70c9bb-deb5-45aa-96e6-aea4e711f93a-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"6c70c9bb-deb5-45aa-96e6-aea4e711f93a\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:26:06 crc kubenswrapper[4708]: I0203 07:26:06.984618 4708 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"6c70c9bb-deb5-45aa-96e6-aea4e711f93a\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/ovsdbserver-nb-0" Feb 03 07:26:06 crc kubenswrapper[4708]: I0203 07:26:06.984595 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6c70c9bb-deb5-45aa-96e6-aea4e711f93a-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"6c70c9bb-deb5-45aa-96e6-aea4e711f93a\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:26:06 crc kubenswrapper[4708]: I0203 07:26:06.985410 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c70c9bb-deb5-45aa-96e6-aea4e711f93a-config\") pod \"ovsdbserver-nb-0\" (UID: \"6c70c9bb-deb5-45aa-96e6-aea4e711f93a\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:26:06 crc kubenswrapper[4708]: I0203 07:26:06.985429 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6c70c9bb-deb5-45aa-96e6-aea4e711f93a-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"6c70c9bb-deb5-45aa-96e6-aea4e711f93a\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:26:06 crc kubenswrapper[4708]: I0203 07:26:06.990721 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c70c9bb-deb5-45aa-96e6-aea4e711f93a-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"6c70c9bb-deb5-45aa-96e6-aea4e711f93a\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:26:06 crc kubenswrapper[4708]: I0203 07:26:06.990924 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c70c9bb-deb5-45aa-96e6-aea4e711f93a-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"6c70c9bb-deb5-45aa-96e6-aea4e711f93a\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:26:06 crc kubenswrapper[4708]: I0203 07:26:06.995133 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c70c9bb-deb5-45aa-96e6-aea4e711f93a-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"6c70c9bb-deb5-45aa-96e6-aea4e711f93a\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.008021 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hjjx\" (UniqueName: \"kubernetes.io/projected/6c70c9bb-deb5-45aa-96e6-aea4e711f93a-kube-api-access-8hjjx\") pod \"ovsdbserver-nb-0\" (UID: 
\"6c70c9bb-deb5-45aa-96e6-aea4e711f93a\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.023748 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"6c70c9bb-deb5-45aa-96e6-aea4e711f93a\") " pod="openstack/ovsdbserver-nb-0" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.104305 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.580540 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.582242 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.584867 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.585070 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.584867 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.585333 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-7jc52" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.595987 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.698957 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xpgnq\" (UniqueName: \"kubernetes.io/projected/d09d6786-b507-4848-977f-a5e94b77d0ad-kube-api-access-xpgnq\") pod \"ovsdbserver-sb-0\" (UID: \"d09d6786-b507-4848-977f-a5e94b77d0ad\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.699007 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d09d6786-b507-4848-977f-a5e94b77d0ad-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"d09d6786-b507-4848-977f-a5e94b77d0ad\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.699070 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d09d6786-b507-4848-977f-a5e94b77d0ad-config\") pod \"ovsdbserver-sb-0\" (UID: \"d09d6786-b507-4848-977f-a5e94b77d0ad\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.699102 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-sb-0\" (UID: \"d09d6786-b507-4848-977f-a5e94b77d0ad\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.699158 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d09d6786-b507-4848-977f-a5e94b77d0ad-ovsdb-rundir\") 
pod \"ovsdbserver-sb-0\" (UID: \"d09d6786-b507-4848-977f-a5e94b77d0ad\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.699185 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d09d6786-b507-4848-977f-a5e94b77d0ad-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"d09d6786-b507-4848-977f-a5e94b77d0ad\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.699204 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d09d6786-b507-4848-977f-a5e94b77d0ad-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"d09d6786-b507-4848-977f-a5e94b77d0ad\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.699231 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d09d6786-b507-4848-977f-a5e94b77d0ad-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"d09d6786-b507-4848-977f-a5e94b77d0ad\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.800450 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xpgnq\" (UniqueName: \"kubernetes.io/projected/d09d6786-b507-4848-977f-a5e94b77d0ad-kube-api-access-xpgnq\") pod \"ovsdbserver-sb-0\" (UID: \"d09d6786-b507-4848-977f-a5e94b77d0ad\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.800529 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d09d6786-b507-4848-977f-a5e94b77d0ad-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"d09d6786-b507-4848-977f-a5e94b77d0ad\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.800550 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d09d6786-b507-4848-977f-a5e94b77d0ad-config\") pod \"ovsdbserver-sb-0\" (UID: \"d09d6786-b507-4848-977f-a5e94b77d0ad\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.800597 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-sb-0\" (UID: \"d09d6786-b507-4848-977f-a5e94b77d0ad\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.800955 4708 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-sb-0\" (UID: \"d09d6786-b507-4848-977f-a5e94b77d0ad\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/ovsdbserver-sb-0" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.801098 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d09d6786-b507-4848-977f-a5e94b77d0ad-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"d09d6786-b507-4848-977f-a5e94b77d0ad\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.801150 4708 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d09d6786-b507-4848-977f-a5e94b77d0ad-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"d09d6786-b507-4848-977f-a5e94b77d0ad\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.801170 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d09d6786-b507-4848-977f-a5e94b77d0ad-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"d09d6786-b507-4848-977f-a5e94b77d0ad\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.801198 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d09d6786-b507-4848-977f-a5e94b77d0ad-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"d09d6786-b507-4848-977f-a5e94b77d0ad\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.801513 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d09d6786-b507-4848-977f-a5e94b77d0ad-config\") pod \"ovsdbserver-sb-0\" (UID: \"d09d6786-b507-4848-977f-a5e94b77d0ad\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.801618 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d09d6786-b507-4848-977f-a5e94b77d0ad-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"d09d6786-b507-4848-977f-a5e94b77d0ad\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.802431 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d09d6786-b507-4848-977f-a5e94b77d0ad-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"d09d6786-b507-4848-977f-a5e94b77d0ad\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.807908 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d09d6786-b507-4848-977f-a5e94b77d0ad-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"d09d6786-b507-4848-977f-a5e94b77d0ad\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.815524 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d09d6786-b507-4848-977f-a5e94b77d0ad-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"d09d6786-b507-4848-977f-a5e94b77d0ad\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.815841 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d09d6786-b507-4848-977f-a5e94b77d0ad-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"d09d6786-b507-4848-977f-a5e94b77d0ad\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.822937 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xpgnq\" (UniqueName: \"kubernetes.io/projected/d09d6786-b507-4848-977f-a5e94b77d0ad-kube-api-access-xpgnq\") pod \"ovsdbserver-sb-0\" (UID: \"d09d6786-b507-4848-977f-a5e94b77d0ad\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:26:07 crc 
kubenswrapper[4708]: I0203 07:26:07.826369 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-sb-0\" (UID: \"d09d6786-b507-4848-977f-a5e94b77d0ad\") " pod="openstack/ovsdbserver-sb-0" Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.859664 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 03 07:26:07 crc kubenswrapper[4708]: I0203 07:26:07.904664 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Feb 03 07:26:11 crc kubenswrapper[4708]: I0203 07:26:11.961707 4708 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 03 07:26:12 crc kubenswrapper[4708]: I0203 07:26:12.175452 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 03 07:26:12 crc kubenswrapper[4708]: I0203 07:26:12.334619 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d79b40f9-049c-46ea-8ade-f43e58bc8cd4","Type":"ContainerStarted","Data":"84179dc416c94c5131983ff97416f553c165cf9deff138a5c5b699e8433a6643"} Feb 03 07:26:12 crc kubenswrapper[4708]: E0203 07:26:12.907870 4708 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Feb 03 07:26:12 crc kubenswrapper[4708]: E0203 07:26:12.908047 4708 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nkzt7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-7f7dd_openstack(938f9b73-8b43-416f-a5ca-ebb496a12734): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 03 07:26:12 crc kubenswrapper[4708]: E0203 07:26:12.909303 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-7f7dd" podUID="938f9b73-8b43-416f-a5ca-ebb496a12734" Feb 03 07:26:12 crc kubenswrapper[4708]: E0203 07:26:12.955991 4708 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Feb 03 07:26:12 crc kubenswrapper[4708]: E0203 07:26:12.956509 4708 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kfhnq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-flpvw_openstack(a4686c6a-4b08-499a-a7d0-ce38a13cf4d1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 03 07:26:12 crc kubenswrapper[4708]: E0203 07:26:12.958087 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-flpvw" podUID="a4686c6a-4b08-499a-a7d0-ce38a13cf4d1" Feb 03 07:26:13 crc kubenswrapper[4708]: I0203 07:26:13.350264 4708 generic.go:334] "Generic (PLEG): container finished" podID="c44c6867-b4bc-45f2-9100-cc320788a3c0" containerID="3297b6d569921f53e586543d859b93af3cec53981be07cb89e4dde63179f6387" exitCode=0 Feb 03 07:26:13 crc kubenswrapper[4708]: I0203 07:26:13.350362 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-gpnt5" event={"ID":"c44c6867-b4bc-45f2-9100-cc320788a3c0","Type":"ContainerDied","Data":"3297b6d569921f53e586543d859b93af3cec53981be07cb89e4dde63179f6387"} Feb 03 07:26:13 crc kubenswrapper[4708]: I0203 07:26:13.352129 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2572b4b8-5df3-4d81-9bd7-8ef427c6d945","Type":"ContainerStarted","Data":"d7256cde6d9ca3b4018e771c6adfcdb68cb7b560f0574609748d8c7f053af2d8"} Feb 03 07:26:13 crc kubenswrapper[4708]: I0203 07:26:13.390905 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ppsk2"] Feb 03 07:26:13 crc kubenswrapper[4708]: I0203 07:26:13.813520 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Feb 03 07:26:13 crc kubenswrapper[4708]: I0203 07:26:13.820703 4708 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-marketplace/redhat-operators-pxgzt"] Feb 03 07:26:13 crc kubenswrapper[4708]: I0203 07:26:13.857853 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 03 07:26:13 crc kubenswrapper[4708]: I0203 07:26:13.866129 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 03 07:26:13 crc kubenswrapper[4708]: W0203 07:26:13.907228 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod271e64f1_20a8_42ad_962a_0b498d561cdd.slice/crio-32b4a0a3d807373d61294a63e9ac3f78cbabb3d70be5674661a956eb8df37478 WatchSource:0}: Error finding container 32b4a0a3d807373d61294a63e9ac3f78cbabb3d70be5674661a956eb8df37478: Status 404 returned error can't find the container with id 32b4a0a3d807373d61294a63e9ac3f78cbabb3d70be5674661a956eb8df37478 Feb 03 07:26:13 crc kubenswrapper[4708]: W0203 07:26:13.917196 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8484d145_abd4_4112_b81c_338bf4d9285f.slice/crio-2f9238d26af81caed02a2cdac194f93f2553902055d3e163823536d4283478b2 WatchSource:0}: Error finding container 2f9238d26af81caed02a2cdac194f93f2553902055d3e163823536d4283478b2: Status 404 returned error can't find the container with id 2f9238d26af81caed02a2cdac194f93f2553902055d3e163823536d4283478b2 Feb 03 07:26:13 crc kubenswrapper[4708]: W0203 07:26:13.920643 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0edbabdf_99f1_49b3_83ee_48ad17467638.slice/crio-052056c6b202ab852ba55e65fe4f4584d238854795d5920d3f5201d72f512039 WatchSource:0}: Error finding container 052056c6b202ab852ba55e65fe4f4584d238854795d5920d3f5201d72f512039: Status 404 returned error can't find the container with id 052056c6b202ab852ba55e65fe4f4584d238854795d5920d3f5201d72f512039 Feb 03 07:26:13 crc kubenswrapper[4708]: I0203 07:26:13.992757 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-flpvw" Feb 03 07:26:13 crc kubenswrapper[4708]: I0203 07:26:13.994738 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-7f7dd" Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.019974 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4hkfj"] Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.069828 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.101972 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a4686c6a-4b08-499a-a7d0-ce38a13cf4d1-config\") pod \"a4686c6a-4b08-499a-a7d0-ce38a13cf4d1\" (UID: \"a4686c6a-4b08-499a-a7d0-ce38a13cf4d1\") " Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.102313 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/938f9b73-8b43-416f-a5ca-ebb496a12734-config\") pod \"938f9b73-8b43-416f-a5ca-ebb496a12734\" (UID: \"938f9b73-8b43-416f-a5ca-ebb496a12734\") " Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.102475 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfhnq\" (UniqueName: \"kubernetes.io/projected/a4686c6a-4b08-499a-a7d0-ce38a13cf4d1-kube-api-access-kfhnq\") pod \"a4686c6a-4b08-499a-a7d0-ce38a13cf4d1\" (UID: \"a4686c6a-4b08-499a-a7d0-ce38a13cf4d1\") " Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.102592 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nkzt7\" (UniqueName: \"kubernetes.io/projected/938f9b73-8b43-416f-a5ca-ebb496a12734-kube-api-access-nkzt7\") pod \"938f9b73-8b43-416f-a5ca-ebb496a12734\" (UID: \"938f9b73-8b43-416f-a5ca-ebb496a12734\") " Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.102701 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a4686c6a-4b08-499a-a7d0-ce38a13cf4d1-dns-svc\") pod \"a4686c6a-4b08-499a-a7d0-ce38a13cf4d1\" (UID: \"a4686c6a-4b08-499a-a7d0-ce38a13cf4d1\") " Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.103239 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4686c6a-4b08-499a-a7d0-ce38a13cf4d1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a4686c6a-4b08-499a-a7d0-ce38a13cf4d1" (UID: "a4686c6a-4b08-499a-a7d0-ce38a13cf4d1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.103261 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4686c6a-4b08-499a-a7d0-ce38a13cf4d1-config" (OuterVolumeSpecName: "config") pod "a4686c6a-4b08-499a-a7d0-ce38a13cf4d1" (UID: "a4686c6a-4b08-499a-a7d0-ce38a13cf4d1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.103248 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/938f9b73-8b43-416f-a5ca-ebb496a12734-config" (OuterVolumeSpecName: "config") pod "938f9b73-8b43-416f-a5ca-ebb496a12734" (UID: "938f9b73-8b43-416f-a5ca-ebb496a12734"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.118879 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4686c6a-4b08-499a-a7d0-ce38a13cf4d1-kube-api-access-kfhnq" (OuterVolumeSpecName: "kube-api-access-kfhnq") pod "a4686c6a-4b08-499a-a7d0-ce38a13cf4d1" (UID: "a4686c6a-4b08-499a-a7d0-ce38a13cf4d1"). InnerVolumeSpecName "kube-api-access-kfhnq". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.135351 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/938f9b73-8b43-416f-a5ca-ebb496a12734-kube-api-access-nkzt7" (OuterVolumeSpecName: "kube-api-access-nkzt7") pod "938f9b73-8b43-416f-a5ca-ebb496a12734" (UID: "938f9b73-8b43-416f-a5ca-ebb496a12734"). InnerVolumeSpecName "kube-api-access-nkzt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.201337 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-pb4xp"] Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.204409 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/938f9b73-8b43-416f-a5ca-ebb496a12734-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.204447 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfhnq\" (UniqueName: \"kubernetes.io/projected/a4686c6a-4b08-499a-a7d0-ce38a13cf4d1-kube-api-access-kfhnq\") on node \"crc\" DevicePath \"\"" Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.204458 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nkzt7\" (UniqueName: \"kubernetes.io/projected/938f9b73-8b43-416f-a5ca-ebb496a12734-kube-api-access-nkzt7\") on node \"crc\" DevicePath \"\"" Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.204468 4708 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a4686c6a-4b08-499a-a7d0-ce38a13cf4d1-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.204477 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a4686c6a-4b08-499a-a7d0-ce38a13cf4d1-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.205025 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.303722 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-48bcs"] Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.359713 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0edbabdf-99f1-49b3-83ee-48ad17467638","Type":"ContainerStarted","Data":"052056c6b202ab852ba55e65fe4f4584d238854795d5920d3f5201d72f512039"} Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.361654 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"98eee8d5-f15e-4add-86d3-d19f15018230","Type":"ContainerStarted","Data":"e131af6e557aba9344c565d0c8cb55ede05bea1c085fbc457aba7bf9948c7a73"} Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.362994 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ppsk2" 
event={"ID":"0f490b43-b9f5-4e99-89a1-e7c75a45b487","Type":"ContainerStarted","Data":"ba5175defb60be7bc96caf3cc589c565bb0a02fce9e9f67f1c3b4ef0073cc233"} Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.364632 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pxgzt" event={"ID":"271e64f1-20a8-42ad-962a-0b498d561cdd","Type":"ContainerStarted","Data":"32b4a0a3d807373d61294a63e9ac3f78cbabb3d70be5674661a956eb8df37478"} Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.365577 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-7f7dd" event={"ID":"938f9b73-8b43-416f-a5ca-ebb496a12734","Type":"ContainerDied","Data":"935e217b620def99f0b9b2149bc72d0d9535008b8591a86a635e205c9adc767e"} Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.365648 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-7f7dd" Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.368809 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"8484d145-abd4-4112-b81c-338bf4d9285f","Type":"ContainerStarted","Data":"2f9238d26af81caed02a2cdac194f93f2553902055d3e163823536d4283478b2"} Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.370126 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-flpvw" event={"ID":"a4686c6a-4b08-499a-a7d0-ce38a13cf4d1","Type":"ContainerDied","Data":"b66b82ef6f5861c52194b18fa65379c4a19a5057a2cfda702ba61c39a0c594b0"} Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.370135 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-flpvw" Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.372863 4708 generic.go:334] "Generic (PLEG): container finished" podID="193dd272-5329-401f-a02c-0910a3d98246" containerID="e80ac35c6994a560cc7047837a599d2d417780a770391ffe660af4d71213783d" exitCode=0 Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.372905 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-zwvx2" event={"ID":"193dd272-5329-401f-a02c-0910a3d98246","Type":"ContainerDied","Data":"e80ac35c6994a560cc7047837a599d2d417780a770391ffe660af4d71213783d"} Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.429498 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-7f7dd"] Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.459436 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-7f7dd"] Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.496423 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-flpvw"] Feb 03 07:26:14 crc kubenswrapper[4708]: I0203 07:26:14.504876 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-flpvw"] Feb 03 07:26:14 crc kubenswrapper[4708]: E0203 07:26:14.949004 4708 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Feb 03 07:26:14 crc kubenswrapper[4708]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/c44c6867-b4bc-45f2-9100-cc320788a3c0/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Feb 03 07:26:14 crc kubenswrapper[4708]: > podSandboxID="2cb623c3e8fd4e358655177877eabdbb14a17788a4cf2008f334d49506b6f8e4" Feb 03 07:26:14 
crc kubenswrapper[4708]: E0203 07:26:14.949495 4708 kuberuntime_manager.go:1274] "Unhandled Error" err=< Feb 03 07:26:14 crc kubenswrapper[4708]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hr4f6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-gpnt5_openstack(c44c6867-b4bc-45f2-9100-cc320788a3c0): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/c44c6867-b4bc-45f2-9100-cc320788a3c0/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Feb 03 07:26:14 crc kubenswrapper[4708]: > logger="UnhandledError" Feb 03 07:26:14 crc kubenswrapper[4708]: E0203 07:26:14.950720 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/c44c6867-b4bc-45f2-9100-cc320788a3c0/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-666b6646f7-gpnt5" 
podUID="c44c6867-b4bc-45f2-9100-cc320788a3c0" Feb 03 07:26:15 crc kubenswrapper[4708]: I0203 07:26:15.022504 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Feb 03 07:26:15 crc kubenswrapper[4708]: I0203 07:26:15.390532 4708 generic.go:334] "Generic (PLEG): container finished" podID="0f490b43-b9f5-4e99-89a1-e7c75a45b487" containerID="9d1de73993e1ef02b38c23e2492e31014eb1de722b8e486ff493e2cd0cec2588" exitCode=0 Feb 03 07:26:15 crc kubenswrapper[4708]: I0203 07:26:15.390631 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ppsk2" event={"ID":"0f490b43-b9f5-4e99-89a1-e7c75a45b487","Type":"ContainerDied","Data":"9d1de73993e1ef02b38c23e2492e31014eb1de722b8e486ff493e2cd0cec2588"} Feb 03 07:26:15 crc kubenswrapper[4708]: I0203 07:26:15.395202 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"6c70c9bb-deb5-45aa-96e6-aea4e711f93a","Type":"ContainerStarted","Data":"79dca664fc654c143f399d62c67fec7fe04ee1c956c3ad62a08b3aecd6fe538c"} Feb 03 07:26:15 crc kubenswrapper[4708]: I0203 07:26:15.399465 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"5f6fa285-4374-4be5-b4cf-e3dd8ef56762","Type":"ContainerStarted","Data":"96e0f742ee0992d472c68110ea64642d4798d53f1f88c5a5228058ce22ba790f"} Feb 03 07:26:15 crc kubenswrapper[4708]: I0203 07:26:15.400771 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-pb4xp" event={"ID":"3b5a2d58-5ebb-4838-a798-bc280fe99951","Type":"ContainerStarted","Data":"b84eb938989ae0d838f81b202b77670e73f059d7044e50edf3347466a97ee6a6"} Feb 03 07:26:15 crc kubenswrapper[4708]: I0203 07:26:15.402236 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-48bcs" event={"ID":"6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024","Type":"ContainerStarted","Data":"1dae1f587c6b66c1a5c069ccb53864b35850f4bc62329f5019759e1acea705f9"} Feb 03 07:26:15 crc kubenswrapper[4708]: I0203 07:26:15.404748 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4hkfj" event={"ID":"69f5f6ed-0270-407f-9b23-68c954638cb1","Type":"ContainerStarted","Data":"7fc80a95814304ba06eff0539fda05d80ddf3d74ef8e1f6d12bfa75b6e9f8cba"} Feb 03 07:26:16 crc kubenswrapper[4708]: I0203 07:26:16.103228 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="938f9b73-8b43-416f-a5ca-ebb496a12734" path="/var/lib/kubelet/pods/938f9b73-8b43-416f-a5ca-ebb496a12734/volumes" Feb 03 07:26:16 crc kubenswrapper[4708]: I0203 07:26:16.103880 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4686c6a-4b08-499a-a7d0-ce38a13cf4d1" path="/var/lib/kubelet/pods/a4686c6a-4b08-499a-a7d0-ce38a13cf4d1/volumes" Feb 03 07:26:16 crc kubenswrapper[4708]: I0203 07:26:16.852239 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-v89mq"] Feb 03 07:26:16 crc kubenswrapper[4708]: I0203 07:26:16.854534 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-v89mq" Feb 03 07:26:16 crc kubenswrapper[4708]: I0203 07:26:16.862819 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-v89mq"] Feb 03 07:26:16 crc kubenswrapper[4708]: I0203 07:26:16.957984 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40-utilities\") pod \"redhat-marketplace-v89mq\" (UID: \"1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40\") " pod="openshift-marketplace/redhat-marketplace-v89mq" Feb 03 07:26:16 crc kubenswrapper[4708]: I0203 07:26:16.958036 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40-catalog-content\") pod \"redhat-marketplace-v89mq\" (UID: \"1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40\") " pod="openshift-marketplace/redhat-marketplace-v89mq" Feb 03 07:26:16 crc kubenswrapper[4708]: I0203 07:26:16.958175 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkcv4\" (UniqueName: \"kubernetes.io/projected/1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40-kube-api-access-zkcv4\") pod \"redhat-marketplace-v89mq\" (UID: \"1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40\") " pod="openshift-marketplace/redhat-marketplace-v89mq" Feb 03 07:26:17 crc kubenswrapper[4708]: I0203 07:26:17.059807 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkcv4\" (UniqueName: \"kubernetes.io/projected/1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40-kube-api-access-zkcv4\") pod \"redhat-marketplace-v89mq\" (UID: \"1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40\") " pod="openshift-marketplace/redhat-marketplace-v89mq" Feb 03 07:26:17 crc kubenswrapper[4708]: I0203 07:26:17.059948 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40-utilities\") pod \"redhat-marketplace-v89mq\" (UID: \"1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40\") " pod="openshift-marketplace/redhat-marketplace-v89mq" Feb 03 07:26:17 crc kubenswrapper[4708]: I0203 07:26:17.059972 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40-catalog-content\") pod \"redhat-marketplace-v89mq\" (UID: \"1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40\") " pod="openshift-marketplace/redhat-marketplace-v89mq" Feb 03 07:26:17 crc kubenswrapper[4708]: I0203 07:26:17.060429 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40-catalog-content\") pod \"redhat-marketplace-v89mq\" (UID: \"1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40\") " pod="openshift-marketplace/redhat-marketplace-v89mq" Feb 03 07:26:17 crc kubenswrapper[4708]: I0203 07:26:17.060509 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40-utilities\") pod \"redhat-marketplace-v89mq\" (UID: \"1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40\") " pod="openshift-marketplace/redhat-marketplace-v89mq" Feb 03 07:26:17 crc kubenswrapper[4708]: I0203 07:26:17.078281 4708 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-zkcv4\" (UniqueName: \"kubernetes.io/projected/1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40-kube-api-access-zkcv4\") pod \"redhat-marketplace-v89mq\" (UID: \"1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40\") " pod="openshift-marketplace/redhat-marketplace-v89mq" Feb 03 07:26:17 crc kubenswrapper[4708]: I0203 07:26:17.177666 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-v89mq" Feb 03 07:26:17 crc kubenswrapper[4708]: W0203 07:26:17.275554 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd09d6786_b507_4848_977f_a5e94b77d0ad.slice/crio-28e12c5c2966fe9b149f8c577c690725e0bcef555f218f7f57b8ec67930485f3 WatchSource:0}: Error finding container 28e12c5c2966fe9b149f8c577c690725e0bcef555f218f7f57b8ec67930485f3: Status 404 returned error can't find the container with id 28e12c5c2966fe9b149f8c577c690725e0bcef555f218f7f57b8ec67930485f3 Feb 03 07:26:17 crc kubenswrapper[4708]: I0203 07:26:17.432839 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"d09d6786-b507-4848-977f-a5e94b77d0ad","Type":"ContainerStarted","Data":"28e12c5c2966fe9b149f8c577c690725e0bcef555f218f7f57b8ec67930485f3"} Feb 03 07:26:18 crc kubenswrapper[4708]: I0203 07:26:18.447935 4708 generic.go:334] "Generic (PLEG): container finished" podID="271e64f1-20a8-42ad-962a-0b498d561cdd" containerID="4e31452224840996574cce8f443004fa472717dafc618452ca4d80aeab7c15ad" exitCode=0 Feb 03 07:26:18 crc kubenswrapper[4708]: I0203 07:26:18.447991 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pxgzt" event={"ID":"271e64f1-20a8-42ad-962a-0b498d561cdd","Type":"ContainerDied","Data":"4e31452224840996574cce8f443004fa472717dafc618452ca4d80aeab7c15ad"} Feb 03 07:26:22 crc kubenswrapper[4708]: I0203 07:26:22.493525 4708 generic.go:334] "Generic (PLEG): container finished" podID="69f5f6ed-0270-407f-9b23-68c954638cb1" containerID="bb786ba9c4b1a2f9bfade13bf99f74a3868790a023c942d4a5d39d7a22611fde" exitCode=0 Feb 03 07:26:22 crc kubenswrapper[4708]: I0203 07:26:22.494730 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4hkfj" event={"ID":"69f5f6ed-0270-407f-9b23-68c954638cb1","Type":"ContainerDied","Data":"bb786ba9c4b1a2f9bfade13bf99f74a3868790a023c942d4a5d39d7a22611fde"} Feb 03 07:26:28 crc kubenswrapper[4708]: I0203 07:26:28.444454 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-v89mq"] Feb 03 07:26:28 crc kubenswrapper[4708]: W0203 07:26:28.472772 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1a7c44b1_d3a9_43f3_8574_ecffdeaaaf40.slice/crio-41e42fb54a8c7af5e4d9ba64c36cf346dda48ec943ac2fd1c592ba1563e5995d WatchSource:0}: Error finding container 41e42fb54a8c7af5e4d9ba64c36cf346dda48ec943ac2fd1c592ba1563e5995d: Status 404 returned error can't find the container with id 41e42fb54a8c7af5e4d9ba64c36cf346dda48ec943ac2fd1c592ba1563e5995d Feb 03 07:26:28 crc kubenswrapper[4708]: I0203 07:26:28.541500 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-v89mq" event={"ID":"1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40","Type":"ContainerStarted","Data":"41e42fb54a8c7af5e4d9ba64c36cf346dda48ec943ac2fd1c592ba1563e5995d"} Feb 03 07:26:29 crc kubenswrapper[4708]: I0203 
07:26:29.551851 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"8484d145-abd4-4112-b81c-338bf4d9285f","Type":"ContainerStarted","Data":"688ee5050169079715ea82cb5b9709b12774727cb5c229d24a225ed756c8b252"} Feb 03 07:26:29 crc kubenswrapper[4708]: I0203 07:26:29.554228 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-zwvx2" event={"ID":"193dd272-5329-401f-a02c-0910a3d98246","Type":"ContainerStarted","Data":"ac4da0274520dc56183b8454cca97c4326ecceaaeca78b26a1e9deae45c58aa8"} Feb 03 07:26:29 crc kubenswrapper[4708]: I0203 07:26:29.554758 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-zwvx2" Feb 03 07:26:29 crc kubenswrapper[4708]: I0203 07:26:29.556774 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"d09d6786-b507-4848-977f-a5e94b77d0ad","Type":"ContainerStarted","Data":"4dadda4ad1e29c7ce9261a36c0969fac372ff96ce32c311b56dd1331d7ff2caa"} Feb 03 07:26:29 crc kubenswrapper[4708]: I0203 07:26:29.558565 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-48bcs" event={"ID":"6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024","Type":"ContainerStarted","Data":"5e28d239b9e26bb9b74c613b94aa56ec9a20a6664cb06eefa3bb1a639a9fcb1d"} Feb 03 07:26:29 crc kubenswrapper[4708]: I0203 07:26:29.560043 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"98eee8d5-f15e-4add-86d3-d19f15018230","Type":"ContainerStarted","Data":"462247541a204ba5e479ec7e183e44b4e06ade4f45b38f32782bda2170d10164"} Feb 03 07:26:29 crc kubenswrapper[4708]: I0203 07:26:29.560181 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Feb 03 07:26:29 crc kubenswrapper[4708]: I0203 07:26:29.564640 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"6c70c9bb-deb5-45aa-96e6-aea4e711f93a","Type":"ContainerStarted","Data":"2ebba949692d439f42acf027273a8bf4fdfb0812d8b02723e85aee65767aeb93"} Feb 03 07:26:29 crc kubenswrapper[4708]: I0203 07:26:29.569114 4708 generic.go:334] "Generic (PLEG): container finished" podID="1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40" containerID="5ccd4769c35303f024bd723654bbe37fad1894226b766ad0632411c4ac240c64" exitCode=0 Feb 03 07:26:29 crc kubenswrapper[4708]: I0203 07:26:29.569424 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-v89mq" event={"ID":"1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40","Type":"ContainerDied","Data":"5ccd4769c35303f024bd723654bbe37fad1894226b766ad0632411c4ac240c64"} Feb 03 07:26:29 crc kubenswrapper[4708]: I0203 07:26:29.571729 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ppsk2" event={"ID":"0f490b43-b9f5-4e99-89a1-e7c75a45b487","Type":"ContainerStarted","Data":"baa471322ac63f35c76c00b9f407a79eef1ab05e5057df14c759a92ae361f6ba"} Feb 03 07:26:29 crc kubenswrapper[4708]: I0203 07:26:29.573820 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"5f6fa285-4374-4be5-b4cf-e3dd8ef56762","Type":"ContainerStarted","Data":"5b6699250aa364eb8d431e1f4f7f2fee80ae0ad649740142c9a53e75894ce753"} Feb 03 07:26:29 crc kubenswrapper[4708]: I0203 07:26:29.576216 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-gpnt5" 
event={"ID":"c44c6867-b4bc-45f2-9100-cc320788a3c0","Type":"ContainerStarted","Data":"29244a0cb2acbd6fbf2d932bf2748c3a4eae484d29843039c9e21354e958f7b5"} Feb 03 07:26:29 crc kubenswrapper[4708]: I0203 07:26:29.576730 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-666b6646f7-gpnt5" Feb 03 07:26:29 crc kubenswrapper[4708]: I0203 07:26:29.578326 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-pb4xp" event={"ID":"3b5a2d58-5ebb-4838-a798-bc280fe99951","Type":"ContainerStarted","Data":"8d53337fff8d9985a592c523579aefd5e4c8dc01dba62d5569ab89538ec12970"} Feb 03 07:26:29 crc kubenswrapper[4708]: I0203 07:26:29.578849 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-pb4xp" Feb 03 07:26:29 crc kubenswrapper[4708]: I0203 07:26:29.585827 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4hkfj" event={"ID":"69f5f6ed-0270-407f-9b23-68c954638cb1","Type":"ContainerStarted","Data":"6784448ded9bf36fc7cd04ba61d2af8056e34fddbd2c4822fc5f429fd412d16d"} Feb 03 07:26:29 crc kubenswrapper[4708]: I0203 07:26:29.595086 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d79b40f9-049c-46ea-8ade-f43e58bc8cd4","Type":"ContainerStarted","Data":"19d393ec5187d10badc94368b0b3326e9c23b758741b65c1a26cd19eb27f32b3"} Feb 03 07:26:29 crc kubenswrapper[4708]: I0203 07:26:29.595499 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Feb 03 07:26:29 crc kubenswrapper[4708]: I0203 07:26:29.598727 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pxgzt" event={"ID":"271e64f1-20a8-42ad-962a-0b498d561cdd","Type":"ContainerStarted","Data":"c024c4ec79c6c814bcff0d6c07250edffa31c916e9dd658de321126f03bc837f"} Feb 03 07:26:29 crc kubenswrapper[4708]: I0203 07:26:29.607009 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-666b6646f7-gpnt5" podStartSLOduration=18.148670332 podStartE2EDuration="35.606989173s" podCreationTimestamp="2026-02-03 07:25:54 +0000 UTC" firstStartedPulling="2026-02-03 07:25:55.528641117 +0000 UTC m=+934.510587924" lastFinishedPulling="2026-02-03 07:26:12.986959958 +0000 UTC m=+951.968906765" observedRunningTime="2026-02-03 07:26:29.606460201 +0000 UTC m=+968.588407008" watchObservedRunningTime="2026-02-03 07:26:29.606989173 +0000 UTC m=+968.588935980" Feb 03 07:26:29 crc kubenswrapper[4708]: I0203 07:26:29.640748 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=18.810980607 podStartE2EDuration="30.640728339s" podCreationTimestamp="2026-02-03 07:25:59 +0000 UTC" firstStartedPulling="2026-02-03 07:26:13.89907256 +0000 UTC m=+952.881019377" lastFinishedPulling="2026-02-03 07:26:25.728820302 +0000 UTC m=+964.710767109" observedRunningTime="2026-02-03 07:26:29.627009167 +0000 UTC m=+968.608955964" watchObservedRunningTime="2026-02-03 07:26:29.640728339 +0000 UTC m=+968.622675146" Feb 03 07:26:29 crc kubenswrapper[4708]: I0203 07:26:29.763175 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-pb4xp" podStartSLOduration=11.882692757000001 podStartE2EDuration="25.763156439s" podCreationTimestamp="2026-02-03 07:26:04 +0000 UTC" firstStartedPulling="2026-02-03 07:26:14.462102822 +0000 UTC m=+953.444049619" 
lastFinishedPulling="2026-02-03 07:26:28.342566474 +0000 UTC m=+967.324513301" observedRunningTime="2026-02-03 07:26:29.760490395 +0000 UTC m=+968.742437202" watchObservedRunningTime="2026-02-03 07:26:29.763156439 +0000 UTC m=+968.745103246" Feb 03 07:26:29 crc kubenswrapper[4708]: I0203 07:26:29.776115 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-zwvx2" podStartSLOduration=18.382405692 podStartE2EDuration="35.776096002s" podCreationTimestamp="2026-02-03 07:25:54 +0000 UTC" firstStartedPulling="2026-02-03 07:25:55.634873535 +0000 UTC m=+934.616820342" lastFinishedPulling="2026-02-03 07:26:13.028563845 +0000 UTC m=+952.010510652" observedRunningTime="2026-02-03 07:26:29.77599383 +0000 UTC m=+968.757940637" watchObservedRunningTime="2026-02-03 07:26:29.776096002 +0000 UTC m=+968.758042809" Feb 03 07:26:29 crc kubenswrapper[4708]: I0203 07:26:29.815046 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=12.315608783 podStartE2EDuration="28.815027433s" podCreationTimestamp="2026-02-03 07:26:01 +0000 UTC" firstStartedPulling="2026-02-03 07:26:11.961464665 +0000 UTC m=+950.943411472" lastFinishedPulling="2026-02-03 07:26:28.460883315 +0000 UTC m=+967.442830122" observedRunningTime="2026-02-03 07:26:29.812173805 +0000 UTC m=+968.794120612" watchObservedRunningTime="2026-02-03 07:26:29.815027433 +0000 UTC m=+968.796974240" Feb 03 07:26:30 crc kubenswrapper[4708]: I0203 07:26:30.609337 4708 generic.go:334] "Generic (PLEG): container finished" podID="271e64f1-20a8-42ad-962a-0b498d561cdd" containerID="c024c4ec79c6c814bcff0d6c07250edffa31c916e9dd658de321126f03bc837f" exitCode=0 Feb 03 07:26:30 crc kubenswrapper[4708]: I0203 07:26:30.609405 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pxgzt" event={"ID":"271e64f1-20a8-42ad-962a-0b498d561cdd","Type":"ContainerDied","Data":"c024c4ec79c6c814bcff0d6c07250edffa31c916e9dd658de321126f03bc837f"} Feb 03 07:26:30 crc kubenswrapper[4708]: I0203 07:26:30.612127 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0edbabdf-99f1-49b3-83ee-48ad17467638","Type":"ContainerStarted","Data":"b1e2311a7605dd6875da84dc45c7f866ac255e7770361e3a9016bff4ae16a8aa"} Feb 03 07:26:30 crc kubenswrapper[4708]: I0203 07:26:30.614435 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2572b4b8-5df3-4d81-9bd7-8ef427c6d945","Type":"ContainerStarted","Data":"2beb5c3e89e59e9e8aba167e3cad443c61c86c2d6b1a629e4cbf9f24206b0baa"} Feb 03 07:26:30 crc kubenswrapper[4708]: I0203 07:26:30.617729 4708 generic.go:334] "Generic (PLEG): container finished" podID="69f5f6ed-0270-407f-9b23-68c954638cb1" containerID="6784448ded9bf36fc7cd04ba61d2af8056e34fddbd2c4822fc5f429fd412d16d" exitCode=0 Feb 03 07:26:30 crc kubenswrapper[4708]: I0203 07:26:30.617830 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4hkfj" event={"ID":"69f5f6ed-0270-407f-9b23-68c954638cb1","Type":"ContainerDied","Data":"6784448ded9bf36fc7cd04ba61d2af8056e34fddbd2c4822fc5f429fd412d16d"} Feb 03 07:26:30 crc kubenswrapper[4708]: I0203 07:26:30.627726 4708 generic.go:334] "Generic (PLEG): container finished" podID="0f490b43-b9f5-4e99-89a1-e7c75a45b487" containerID="baa471322ac63f35c76c00b9f407a79eef1ab05e5057df14c759a92ae361f6ba" exitCode=0 Feb 03 07:26:30 crc kubenswrapper[4708]: I0203 
07:26:30.627784 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ppsk2" event={"ID":"0f490b43-b9f5-4e99-89a1-e7c75a45b487","Type":"ContainerDied","Data":"baa471322ac63f35c76c00b9f407a79eef1ab05e5057df14c759a92ae361f6ba"} Feb 03 07:26:31 crc kubenswrapper[4708]: I0203 07:26:31.636953 4708 generic.go:334] "Generic (PLEG): container finished" podID="6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024" containerID="5e28d239b9e26bb9b74c613b94aa56ec9a20a6664cb06eefa3bb1a639a9fcb1d" exitCode=0 Feb 03 07:26:31 crc kubenswrapper[4708]: I0203 07:26:31.637082 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-48bcs" event={"ID":"6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024","Type":"ContainerDied","Data":"5e28d239b9e26bb9b74c613b94aa56ec9a20a6664cb06eefa3bb1a639a9fcb1d"} Feb 03 07:26:33 crc kubenswrapper[4708]: I0203 07:26:33.680618 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-48bcs" event={"ID":"6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024","Type":"ContainerStarted","Data":"2313e1187a1d78a2655af89fa2c63b6790227f356bede3a1d5dfa1d0d2fedb3a"} Feb 03 07:26:34 crc kubenswrapper[4708]: I0203 07:26:34.564435 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Feb 03 07:26:35 crc kubenswrapper[4708]: I0203 07:26:35.002359 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-666b6646f7-gpnt5" Feb 03 07:26:35 crc kubenswrapper[4708]: I0203 07:26:35.380567 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57d769cc4f-zwvx2" Feb 03 07:26:35 crc kubenswrapper[4708]: I0203 07:26:35.435588 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-gpnt5"] Feb 03 07:26:35 crc kubenswrapper[4708]: I0203 07:26:35.692327 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-666b6646f7-gpnt5" podUID="c44c6867-b4bc-45f2-9100-cc320788a3c0" containerName="dnsmasq-dns" containerID="cri-o://29244a0cb2acbd6fbf2d932bf2748c3a4eae484d29843039c9e21354e958f7b5" gracePeriod=10 Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:37.707504 4708 generic.go:334] "Generic (PLEG): container finished" podID="c44c6867-b4bc-45f2-9100-cc320788a3c0" containerID="29244a0cb2acbd6fbf2d932bf2748c3a4eae484d29843039c9e21354e958f7b5" exitCode=0 Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:37.707573 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-gpnt5" event={"ID":"c44c6867-b4bc-45f2-9100-cc320788a3c0","Type":"ContainerDied","Data":"29244a0cb2acbd6fbf2d932bf2748c3a4eae484d29843039c9e21354e958f7b5"} Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:40.000669 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-666b6646f7-gpnt5" podUID="c44c6867-b4bc-45f2-9100-cc320788a3c0" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.91:5353: connect: connection refused" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:41.405619 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:41.449007 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-8pqzz"] Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:41.450243 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-8pqzz" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:41.475242 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-8pqzz"] Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:41.496966 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gr8g\" (UniqueName: \"kubernetes.io/projected/95221c6e-0d7b-4961-8d71-15134431bac0-kube-api-access-2gr8g\") pod \"dnsmasq-dns-7cb5889db5-8pqzz\" (UID: \"95221c6e-0d7b-4961-8d71-15134431bac0\") " pod="openstack/dnsmasq-dns-7cb5889db5-8pqzz" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:41.497194 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/95221c6e-0d7b-4961-8d71-15134431bac0-dns-svc\") pod \"dnsmasq-dns-7cb5889db5-8pqzz\" (UID: \"95221c6e-0d7b-4961-8d71-15134431bac0\") " pod="openstack/dnsmasq-dns-7cb5889db5-8pqzz" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:41.497236 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95221c6e-0d7b-4961-8d71-15134431bac0-config\") pod \"dnsmasq-dns-7cb5889db5-8pqzz\" (UID: \"95221c6e-0d7b-4961-8d71-15134431bac0\") " pod="openstack/dnsmasq-dns-7cb5889db5-8pqzz" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:41.599062 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/95221c6e-0d7b-4961-8d71-15134431bac0-dns-svc\") pod \"dnsmasq-dns-7cb5889db5-8pqzz\" (UID: \"95221c6e-0d7b-4961-8d71-15134431bac0\") " pod="openstack/dnsmasq-dns-7cb5889db5-8pqzz" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:41.599143 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95221c6e-0d7b-4961-8d71-15134431bac0-config\") pod \"dnsmasq-dns-7cb5889db5-8pqzz\" (UID: \"95221c6e-0d7b-4961-8d71-15134431bac0\") " pod="openstack/dnsmasq-dns-7cb5889db5-8pqzz" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:41.599222 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gr8g\" (UniqueName: \"kubernetes.io/projected/95221c6e-0d7b-4961-8d71-15134431bac0-kube-api-access-2gr8g\") pod \"dnsmasq-dns-7cb5889db5-8pqzz\" (UID: \"95221c6e-0d7b-4961-8d71-15134431bac0\") " pod="openstack/dnsmasq-dns-7cb5889db5-8pqzz" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:41.600037 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95221c6e-0d7b-4961-8d71-15134431bac0-config\") pod \"dnsmasq-dns-7cb5889db5-8pqzz\" (UID: \"95221c6e-0d7b-4961-8d71-15134431bac0\") " pod="openstack/dnsmasq-dns-7cb5889db5-8pqzz" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:41.600504 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/95221c6e-0d7b-4961-8d71-15134431bac0-dns-svc\") pod \"dnsmasq-dns-7cb5889db5-8pqzz\" (UID: \"95221c6e-0d7b-4961-8d71-15134431bac0\") " pod="openstack/dnsmasq-dns-7cb5889db5-8pqzz" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:41.623685 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gr8g\" (UniqueName: 
\"kubernetes.io/projected/95221c6e-0d7b-4961-8d71-15134431bac0-kube-api-access-2gr8g\") pod \"dnsmasq-dns-7cb5889db5-8pqzz\" (UID: \"95221c6e-0d7b-4961-8d71-15134431bac0\") " pod="openstack/dnsmasq-dns-7cb5889db5-8pqzz" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:41.774971 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-8pqzz" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:42.535315 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:42.540913 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:42.544973 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:42.545011 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:42.545137 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:42.545019 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-vlvbq" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:42.560768 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:42.615168 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/a0593ff7-ba15-46be-8879-70dc42f3beb2-cache\") pod \"swift-storage-0\" (UID: \"a0593ff7-ba15-46be-8879-70dc42f3beb2\") " pod="openstack/swift-storage-0" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:42.615232 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgbzt\" (UniqueName: \"kubernetes.io/projected/a0593ff7-ba15-46be-8879-70dc42f3beb2-kube-api-access-mgbzt\") pod \"swift-storage-0\" (UID: \"a0593ff7-ba15-46be-8879-70dc42f3beb2\") " pod="openstack/swift-storage-0" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:42.615272 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a0593ff7-ba15-46be-8879-70dc42f3beb2-etc-swift\") pod \"swift-storage-0\" (UID: \"a0593ff7-ba15-46be-8879-70dc42f3beb2\") " pod="openstack/swift-storage-0" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:42.615328 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/a0593ff7-ba15-46be-8879-70dc42f3beb2-lock\") pod \"swift-storage-0\" (UID: \"a0593ff7-ba15-46be-8879-70dc42f3beb2\") " pod="openstack/swift-storage-0" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:42.615590 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"a0593ff7-ba15-46be-8879-70dc42f3beb2\") " pod="openstack/swift-storage-0" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:42.615655 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0593ff7-ba15-46be-8879-70dc42f3beb2-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"a0593ff7-ba15-46be-8879-70dc42f3beb2\") " pod="openstack/swift-storage-0" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:42.717607 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a0593ff7-ba15-46be-8879-70dc42f3beb2-etc-swift\") pod \"swift-storage-0\" (UID: \"a0593ff7-ba15-46be-8879-70dc42f3beb2\") " pod="openstack/swift-storage-0" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:42.717682 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/a0593ff7-ba15-46be-8879-70dc42f3beb2-lock\") pod \"swift-storage-0\" (UID: \"a0593ff7-ba15-46be-8879-70dc42f3beb2\") " pod="openstack/swift-storage-0" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:42.717733 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"a0593ff7-ba15-46be-8879-70dc42f3beb2\") " pod="openstack/swift-storage-0" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:42.717759 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0593ff7-ba15-46be-8879-70dc42f3beb2-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"a0593ff7-ba15-46be-8879-70dc42f3beb2\") " pod="openstack/swift-storage-0" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:42.717874 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/a0593ff7-ba15-46be-8879-70dc42f3beb2-cache\") pod \"swift-storage-0\" (UID: \"a0593ff7-ba15-46be-8879-70dc42f3beb2\") " pod="openstack/swift-storage-0" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:42.717903 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgbzt\" (UniqueName: \"kubernetes.io/projected/a0593ff7-ba15-46be-8879-70dc42f3beb2-kube-api-access-mgbzt\") pod \"swift-storage-0\" (UID: \"a0593ff7-ba15-46be-8879-70dc42f3beb2\") " pod="openstack/swift-storage-0" Feb 03 07:26:44 crc kubenswrapper[4708]: E0203 07:26:42.717993 4708 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 03 07:26:44 crc kubenswrapper[4708]: E0203 07:26:42.718044 4708 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:42.718129 4708 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"a0593ff7-ba15-46be-8879-70dc42f3beb2\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/swift-storage-0" Feb 03 07:26:44 crc kubenswrapper[4708]: E0203 07:26:42.718167 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a0593ff7-ba15-46be-8879-70dc42f3beb2-etc-swift podName:a0593ff7-ba15-46be-8879-70dc42f3beb2 nodeName:}" failed. No retries permitted until 2026-02-03 07:26:43.218116437 +0000 UTC m=+982.200063404 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a0593ff7-ba15-46be-8879-70dc42f3beb2-etc-swift") pod "swift-storage-0" (UID: "a0593ff7-ba15-46be-8879-70dc42f3beb2") : configmap "swift-ring-files" not found Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:42.718330 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/a0593ff7-ba15-46be-8879-70dc42f3beb2-lock\") pod \"swift-storage-0\" (UID: \"a0593ff7-ba15-46be-8879-70dc42f3beb2\") " pod="openstack/swift-storage-0" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:42.718612 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/a0593ff7-ba15-46be-8879-70dc42f3beb2-cache\") pod \"swift-storage-0\" (UID: \"a0593ff7-ba15-46be-8879-70dc42f3beb2\") " pod="openstack/swift-storage-0" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:42.725452 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0593ff7-ba15-46be-8879-70dc42f3beb2-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"a0593ff7-ba15-46be-8879-70dc42f3beb2\") " pod="openstack/swift-storage-0" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:42.737982 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"a0593ff7-ba15-46be-8879-70dc42f3beb2\") " pod="openstack/swift-storage-0" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:42.738674 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgbzt\" (UniqueName: \"kubernetes.io/projected/a0593ff7-ba15-46be-8879-70dc42f3beb2-kube-api-access-mgbzt\") pod \"swift-storage-0\" (UID: \"a0593ff7-ba15-46be-8879-70dc42f3beb2\") " pod="openstack/swift-storage-0" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:42.984814 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-4x9fb"] Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:42.986406 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-4x9fb" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:42.989789 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.002290 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.002333 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.011071 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-4x9fb"] Feb 03 07:26:44 crc kubenswrapper[4708]: E0203 07:26:43.011666 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-tdxwp ring-data-devices scripts swiftconf], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/swift-ring-rebalance-4x9fb" podUID="00a84ff9-5edc-4e18-a656-0a303d9c734c" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.023409 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/00a84ff9-5edc-4e18-a656-0a303d9c734c-swiftconf\") pod \"swift-ring-rebalance-4x9fb\" (UID: \"00a84ff9-5edc-4e18-a656-0a303d9c734c\") " pod="openstack/swift-ring-rebalance-4x9fb" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.023495 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tdxwp\" (UniqueName: \"kubernetes.io/projected/00a84ff9-5edc-4e18-a656-0a303d9c734c-kube-api-access-tdxwp\") pod \"swift-ring-rebalance-4x9fb\" (UID: \"00a84ff9-5edc-4e18-a656-0a303d9c734c\") " pod="openstack/swift-ring-rebalance-4x9fb" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.023578 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00a84ff9-5edc-4e18-a656-0a303d9c734c-combined-ca-bundle\") pod \"swift-ring-rebalance-4x9fb\" (UID: \"00a84ff9-5edc-4e18-a656-0a303d9c734c\") " pod="openstack/swift-ring-rebalance-4x9fb" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.023644 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/00a84ff9-5edc-4e18-a656-0a303d9c734c-scripts\") pod \"swift-ring-rebalance-4x9fb\" (UID: \"00a84ff9-5edc-4e18-a656-0a303d9c734c\") " pod="openstack/swift-ring-rebalance-4x9fb" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.023789 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/00a84ff9-5edc-4e18-a656-0a303d9c734c-dispersionconf\") pod \"swift-ring-rebalance-4x9fb\" (UID: \"00a84ff9-5edc-4e18-a656-0a303d9c734c\") " pod="openstack/swift-ring-rebalance-4x9fb" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.023951 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/00a84ff9-5edc-4e18-a656-0a303d9c734c-ring-data-devices\") pod \"swift-ring-rebalance-4x9fb\" (UID: \"00a84ff9-5edc-4e18-a656-0a303d9c734c\") " 
pod="openstack/swift-ring-rebalance-4x9fb" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.023991 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/00a84ff9-5edc-4e18-a656-0a303d9c734c-etc-swift\") pod \"swift-ring-rebalance-4x9fb\" (UID: \"00a84ff9-5edc-4e18-a656-0a303d9c734c\") " pod="openstack/swift-ring-rebalance-4x9fb" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.024395 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-zljlj"] Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.025788 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-zljlj" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.031976 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-4x9fb"] Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.045678 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-zljlj"] Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.125442 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/fdec39a4-6222-4122-901f-4a6603afc348-ring-data-devices\") pod \"swift-ring-rebalance-zljlj\" (UID: \"fdec39a4-6222-4122-901f-4a6603afc348\") " pod="openstack/swift-ring-rebalance-zljlj" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.125509 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/00a84ff9-5edc-4e18-a656-0a303d9c734c-swiftconf\") pod \"swift-ring-rebalance-4x9fb\" (UID: \"00a84ff9-5edc-4e18-a656-0a303d9c734c\") " pod="openstack/swift-ring-rebalance-4x9fb" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.125613 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdec39a4-6222-4122-901f-4a6603afc348-combined-ca-bundle\") pod \"swift-ring-rebalance-zljlj\" (UID: \"fdec39a4-6222-4122-901f-4a6603afc348\") " pod="openstack/swift-ring-rebalance-zljlj" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.125692 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndjdn\" (UniqueName: \"kubernetes.io/projected/fdec39a4-6222-4122-901f-4a6603afc348-kube-api-access-ndjdn\") pod \"swift-ring-rebalance-zljlj\" (UID: \"fdec39a4-6222-4122-901f-4a6603afc348\") " pod="openstack/swift-ring-rebalance-zljlj" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.125731 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tdxwp\" (UniqueName: \"kubernetes.io/projected/00a84ff9-5edc-4e18-a656-0a303d9c734c-kube-api-access-tdxwp\") pod \"swift-ring-rebalance-4x9fb\" (UID: \"00a84ff9-5edc-4e18-a656-0a303d9c734c\") " pod="openstack/swift-ring-rebalance-4x9fb" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.125770 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/fdec39a4-6222-4122-901f-4a6603afc348-etc-swift\") pod \"swift-ring-rebalance-zljlj\" (UID: \"fdec39a4-6222-4122-901f-4a6603afc348\") " pod="openstack/swift-ring-rebalance-zljlj" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 
07:26:43.125833 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00a84ff9-5edc-4e18-a656-0a303d9c734c-combined-ca-bundle\") pod \"swift-ring-rebalance-4x9fb\" (UID: \"00a84ff9-5edc-4e18-a656-0a303d9c734c\") " pod="openstack/swift-ring-rebalance-4x9fb" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.125906 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/00a84ff9-5edc-4e18-a656-0a303d9c734c-scripts\") pod \"swift-ring-rebalance-4x9fb\" (UID: \"00a84ff9-5edc-4e18-a656-0a303d9c734c\") " pod="openstack/swift-ring-rebalance-4x9fb" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.125960 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/fdec39a4-6222-4122-901f-4a6603afc348-dispersionconf\") pod \"swift-ring-rebalance-zljlj\" (UID: \"fdec39a4-6222-4122-901f-4a6603afc348\") " pod="openstack/swift-ring-rebalance-zljlj" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.125988 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/00a84ff9-5edc-4e18-a656-0a303d9c734c-dispersionconf\") pod \"swift-ring-rebalance-4x9fb\" (UID: \"00a84ff9-5edc-4e18-a656-0a303d9c734c\") " pod="openstack/swift-ring-rebalance-4x9fb" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.126012 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fdec39a4-6222-4122-901f-4a6603afc348-scripts\") pod \"swift-ring-rebalance-zljlj\" (UID: \"fdec39a4-6222-4122-901f-4a6603afc348\") " pod="openstack/swift-ring-rebalance-zljlj" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.126030 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/fdec39a4-6222-4122-901f-4a6603afc348-swiftconf\") pod \"swift-ring-rebalance-zljlj\" (UID: \"fdec39a4-6222-4122-901f-4a6603afc348\") " pod="openstack/swift-ring-rebalance-zljlj" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.126185 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/00a84ff9-5edc-4e18-a656-0a303d9c734c-ring-data-devices\") pod \"swift-ring-rebalance-4x9fb\" (UID: \"00a84ff9-5edc-4e18-a656-0a303d9c734c\") " pod="openstack/swift-ring-rebalance-4x9fb" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.126233 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/00a84ff9-5edc-4e18-a656-0a303d9c734c-etc-swift\") pod \"swift-ring-rebalance-4x9fb\" (UID: \"00a84ff9-5edc-4e18-a656-0a303d9c734c\") " pod="openstack/swift-ring-rebalance-4x9fb" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.126868 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/00a84ff9-5edc-4e18-a656-0a303d9c734c-etc-swift\") pod \"swift-ring-rebalance-4x9fb\" (UID: \"00a84ff9-5edc-4e18-a656-0a303d9c734c\") " pod="openstack/swift-ring-rebalance-4x9fb" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.126907 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/00a84ff9-5edc-4e18-a656-0a303d9c734c-ring-data-devices\") pod \"swift-ring-rebalance-4x9fb\" (UID: \"00a84ff9-5edc-4e18-a656-0a303d9c734c\") " pod="openstack/swift-ring-rebalance-4x9fb" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.126974 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/00a84ff9-5edc-4e18-a656-0a303d9c734c-scripts\") pod \"swift-ring-rebalance-4x9fb\" (UID: \"00a84ff9-5edc-4e18-a656-0a303d9c734c\") " pod="openstack/swift-ring-rebalance-4x9fb" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.128576 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/00a84ff9-5edc-4e18-a656-0a303d9c734c-swiftconf\") pod \"swift-ring-rebalance-4x9fb\" (UID: \"00a84ff9-5edc-4e18-a656-0a303d9c734c\") " pod="openstack/swift-ring-rebalance-4x9fb" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.129276 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00a84ff9-5edc-4e18-a656-0a303d9c734c-combined-ca-bundle\") pod \"swift-ring-rebalance-4x9fb\" (UID: \"00a84ff9-5edc-4e18-a656-0a303d9c734c\") " pod="openstack/swift-ring-rebalance-4x9fb" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.129348 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/00a84ff9-5edc-4e18-a656-0a303d9c734c-dispersionconf\") pod \"swift-ring-rebalance-4x9fb\" (UID: \"00a84ff9-5edc-4e18-a656-0a303d9c734c\") " pod="openstack/swift-ring-rebalance-4x9fb" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.140206 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tdxwp\" (UniqueName: \"kubernetes.io/projected/00a84ff9-5edc-4e18-a656-0a303d9c734c-kube-api-access-tdxwp\") pod \"swift-ring-rebalance-4x9fb\" (UID: \"00a84ff9-5edc-4e18-a656-0a303d9c734c\") " pod="openstack/swift-ring-rebalance-4x9fb" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.227626 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/fdec39a4-6222-4122-901f-4a6603afc348-ring-data-devices\") pod \"swift-ring-rebalance-zljlj\" (UID: \"fdec39a4-6222-4122-901f-4a6603afc348\") " pod="openstack/swift-ring-rebalance-zljlj" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.227686 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdec39a4-6222-4122-901f-4a6603afc348-combined-ca-bundle\") pod \"swift-ring-rebalance-zljlj\" (UID: \"fdec39a4-6222-4122-901f-4a6603afc348\") " pod="openstack/swift-ring-rebalance-zljlj" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.227711 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndjdn\" (UniqueName: \"kubernetes.io/projected/fdec39a4-6222-4122-901f-4a6603afc348-kube-api-access-ndjdn\") pod \"swift-ring-rebalance-zljlj\" (UID: \"fdec39a4-6222-4122-901f-4a6603afc348\") " pod="openstack/swift-ring-rebalance-zljlj" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.227736 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/fdec39a4-6222-4122-901f-4a6603afc348-etc-swift\") pod 
\"swift-ring-rebalance-zljlj\" (UID: \"fdec39a4-6222-4122-901f-4a6603afc348\") " pod="openstack/swift-ring-rebalance-zljlj" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.227771 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a0593ff7-ba15-46be-8879-70dc42f3beb2-etc-swift\") pod \"swift-storage-0\" (UID: \"a0593ff7-ba15-46be-8879-70dc42f3beb2\") " pod="openstack/swift-storage-0" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.227825 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/fdec39a4-6222-4122-901f-4a6603afc348-dispersionconf\") pod \"swift-ring-rebalance-zljlj\" (UID: \"fdec39a4-6222-4122-901f-4a6603afc348\") " pod="openstack/swift-ring-rebalance-zljlj" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.227856 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fdec39a4-6222-4122-901f-4a6603afc348-scripts\") pod \"swift-ring-rebalance-zljlj\" (UID: \"fdec39a4-6222-4122-901f-4a6603afc348\") " pod="openstack/swift-ring-rebalance-zljlj" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.227873 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/fdec39a4-6222-4122-901f-4a6603afc348-swiftconf\") pod \"swift-ring-rebalance-zljlj\" (UID: \"fdec39a4-6222-4122-901f-4a6603afc348\") " pod="openstack/swift-ring-rebalance-zljlj" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.231097 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fdec39a4-6222-4122-901f-4a6603afc348-scripts\") pod \"swift-ring-rebalance-zljlj\" (UID: \"fdec39a4-6222-4122-901f-4a6603afc348\") " pod="openstack/swift-ring-rebalance-zljlj" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.231613 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/fdec39a4-6222-4122-901f-4a6603afc348-ring-data-devices\") pod \"swift-ring-rebalance-zljlj\" (UID: \"fdec39a4-6222-4122-901f-4a6603afc348\") " pod="openstack/swift-ring-rebalance-zljlj" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.231907 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/fdec39a4-6222-4122-901f-4a6603afc348-etc-swift\") pod \"swift-ring-rebalance-zljlj\" (UID: \"fdec39a4-6222-4122-901f-4a6603afc348\") " pod="openstack/swift-ring-rebalance-zljlj" Feb 03 07:26:44 crc kubenswrapper[4708]: E0203 07:26:43.232028 4708 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 03 07:26:44 crc kubenswrapper[4708]: E0203 07:26:43.232046 4708 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 03 07:26:44 crc kubenswrapper[4708]: E0203 07:26:43.232105 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a0593ff7-ba15-46be-8879-70dc42f3beb2-etc-swift podName:a0593ff7-ba15-46be-8879-70dc42f3beb2 nodeName:}" failed. No retries permitted until 2026-02-03 07:26:44.232087718 +0000 UTC m=+983.214034545 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a0593ff7-ba15-46be-8879-70dc42f3beb2-etc-swift") pod "swift-storage-0" (UID: "a0593ff7-ba15-46be-8879-70dc42f3beb2") : configmap "swift-ring-files" not found Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.235718 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/fdec39a4-6222-4122-901f-4a6603afc348-dispersionconf\") pod \"swift-ring-rebalance-zljlj\" (UID: \"fdec39a4-6222-4122-901f-4a6603afc348\") " pod="openstack/swift-ring-rebalance-zljlj" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.240451 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdec39a4-6222-4122-901f-4a6603afc348-combined-ca-bundle\") pod \"swift-ring-rebalance-zljlj\" (UID: \"fdec39a4-6222-4122-901f-4a6603afc348\") " pod="openstack/swift-ring-rebalance-zljlj" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.244948 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/fdec39a4-6222-4122-901f-4a6603afc348-swiftconf\") pod \"swift-ring-rebalance-zljlj\" (UID: \"fdec39a4-6222-4122-901f-4a6603afc348\") " pod="openstack/swift-ring-rebalance-zljlj" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.258905 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndjdn\" (UniqueName: \"kubernetes.io/projected/fdec39a4-6222-4122-901f-4a6603afc348-kube-api-access-ndjdn\") pod \"swift-ring-rebalance-zljlj\" (UID: \"fdec39a4-6222-4122-901f-4a6603afc348\") " pod="openstack/swift-ring-rebalance-zljlj" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.343399 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-zljlj" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.761375 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-4x9fb" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.776301 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-4x9fb" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.840266 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/00a84ff9-5edc-4e18-a656-0a303d9c734c-scripts\") pod \"00a84ff9-5edc-4e18-a656-0a303d9c734c\" (UID: \"00a84ff9-5edc-4e18-a656-0a303d9c734c\") " Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.840617 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/00a84ff9-5edc-4e18-a656-0a303d9c734c-ring-data-devices\") pod \"00a84ff9-5edc-4e18-a656-0a303d9c734c\" (UID: \"00a84ff9-5edc-4e18-a656-0a303d9c734c\") " Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.840646 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/00a84ff9-5edc-4e18-a656-0a303d9c734c-swiftconf\") pod \"00a84ff9-5edc-4e18-a656-0a303d9c734c\" (UID: \"00a84ff9-5edc-4e18-a656-0a303d9c734c\") " Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.840685 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00a84ff9-5edc-4e18-a656-0a303d9c734c-combined-ca-bundle\") pod \"00a84ff9-5edc-4e18-a656-0a303d9c734c\" (UID: \"00a84ff9-5edc-4e18-a656-0a303d9c734c\") " Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.840776 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/00a84ff9-5edc-4e18-a656-0a303d9c734c-etc-swift\") pod \"00a84ff9-5edc-4e18-a656-0a303d9c734c\" (UID: \"00a84ff9-5edc-4e18-a656-0a303d9c734c\") " Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.840847 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/00a84ff9-5edc-4e18-a656-0a303d9c734c-dispersionconf\") pod \"00a84ff9-5edc-4e18-a656-0a303d9c734c\" (UID: \"00a84ff9-5edc-4e18-a656-0a303d9c734c\") " Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.840903 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tdxwp\" (UniqueName: \"kubernetes.io/projected/00a84ff9-5edc-4e18-a656-0a303d9c734c-kube-api-access-tdxwp\") pod \"00a84ff9-5edc-4e18-a656-0a303d9c734c\" (UID: \"00a84ff9-5edc-4e18-a656-0a303d9c734c\") " Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.840962 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00a84ff9-5edc-4e18-a656-0a303d9c734c-scripts" (OuterVolumeSpecName: "scripts") pod "00a84ff9-5edc-4e18-a656-0a303d9c734c" (UID: "00a84ff9-5edc-4e18-a656-0a303d9c734c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.841169 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00a84ff9-5edc-4e18-a656-0a303d9c734c-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "00a84ff9-5edc-4e18-a656-0a303d9c734c" (UID: "00a84ff9-5edc-4e18-a656-0a303d9c734c"). InnerVolumeSpecName "ring-data-devices". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.841387 4708 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/00a84ff9-5edc-4e18-a656-0a303d9c734c-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.841404 4708 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/00a84ff9-5edc-4e18-a656-0a303d9c734c-ring-data-devices\") on node \"crc\" DevicePath \"\"" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.841826 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/00a84ff9-5edc-4e18-a656-0a303d9c734c-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "00a84ff9-5edc-4e18-a656-0a303d9c734c" (UID: "00a84ff9-5edc-4e18-a656-0a303d9c734c"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.844696 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00a84ff9-5edc-4e18-a656-0a303d9c734c-kube-api-access-tdxwp" (OuterVolumeSpecName: "kube-api-access-tdxwp") pod "00a84ff9-5edc-4e18-a656-0a303d9c734c" (UID: "00a84ff9-5edc-4e18-a656-0a303d9c734c"). InnerVolumeSpecName "kube-api-access-tdxwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.845496 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00a84ff9-5edc-4e18-a656-0a303d9c734c-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "00a84ff9-5edc-4e18-a656-0a303d9c734c" (UID: "00a84ff9-5edc-4e18-a656-0a303d9c734c"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.846572 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00a84ff9-5edc-4e18-a656-0a303d9c734c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "00a84ff9-5edc-4e18-a656-0a303d9c734c" (UID: "00a84ff9-5edc-4e18-a656-0a303d9c734c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.850606 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00a84ff9-5edc-4e18-a656-0a303d9c734c-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "00a84ff9-5edc-4e18-a656-0a303d9c734c" (UID: "00a84ff9-5edc-4e18-a656-0a303d9c734c"). InnerVolumeSpecName "dispersionconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.942877 4708 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/00a84ff9-5edc-4e18-a656-0a303d9c734c-swiftconf\") on node \"crc\" DevicePath \"\"" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.942913 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00a84ff9-5edc-4e18-a656-0a303d9c734c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.942928 4708 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/00a84ff9-5edc-4e18-a656-0a303d9c734c-etc-swift\") on node \"crc\" DevicePath \"\"" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.942939 4708 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/00a84ff9-5edc-4e18-a656-0a303d9c734c-dispersionconf\") on node \"crc\" DevicePath \"\"" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:43.942951 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tdxwp\" (UniqueName: \"kubernetes.io/projected/00a84ff9-5edc-4e18-a656-0a303d9c734c-kube-api-access-tdxwp\") on node \"crc\" DevicePath \"\"" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:44.248366 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a0593ff7-ba15-46be-8879-70dc42f3beb2-etc-swift\") pod \"swift-storage-0\" (UID: \"a0593ff7-ba15-46be-8879-70dc42f3beb2\") " pod="openstack/swift-storage-0" Feb 03 07:26:44 crc kubenswrapper[4708]: E0203 07:26:44.248528 4708 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 03 07:26:44 crc kubenswrapper[4708]: E0203 07:26:44.248546 4708 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 03 07:26:44 crc kubenswrapper[4708]: E0203 07:26:44.248594 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a0593ff7-ba15-46be-8879-70dc42f3beb2-etc-swift podName:a0593ff7-ba15-46be-8879-70dc42f3beb2 nodeName:}" failed. No retries permitted until 2026-02-03 07:26:46.248579721 +0000 UTC m=+985.230526528 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a0593ff7-ba15-46be-8879-70dc42f3beb2-etc-swift") pod "swift-storage-0" (UID: "a0593ff7-ba15-46be-8879-70dc42f3beb2") : configmap "swift-ring-files" not found Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:44.766540 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-4x9fb" Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:44.808054 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-4x9fb"] Feb 03 07:26:44 crc kubenswrapper[4708]: I0203 07:26:44.834576 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-4x9fb"] Feb 03 07:26:45 crc kubenswrapper[4708]: I0203 07:26:45.001050 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-666b6646f7-gpnt5" podUID="c44c6867-b4bc-45f2-9100-cc320788a3c0" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.91:5353: connect: connection refused" Feb 03 07:26:45 crc kubenswrapper[4708]: I0203 07:26:45.777025 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-48bcs" event={"ID":"6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024","Type":"ContainerStarted","Data":"5ed217fed3f57dd21b6c6f756f41aedaeb99dee2b25f053e3cf717b2c950974c"} Feb 03 07:26:46 crc kubenswrapper[4708]: I0203 07:26:46.102511 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00a84ff9-5edc-4e18-a656-0a303d9c734c" path="/var/lib/kubelet/pods/00a84ff9-5edc-4e18-a656-0a303d9c734c/volumes" Feb 03 07:26:46 crc kubenswrapper[4708]: I0203 07:26:46.278619 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a0593ff7-ba15-46be-8879-70dc42f3beb2-etc-swift\") pod \"swift-storage-0\" (UID: \"a0593ff7-ba15-46be-8879-70dc42f3beb2\") " pod="openstack/swift-storage-0" Feb 03 07:26:46 crc kubenswrapper[4708]: E0203 07:26:46.278852 4708 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 03 07:26:46 crc kubenswrapper[4708]: E0203 07:26:46.278878 4708 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 03 07:26:46 crc kubenswrapper[4708]: E0203 07:26:46.278939 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a0593ff7-ba15-46be-8879-70dc42f3beb2-etc-swift podName:a0593ff7-ba15-46be-8879-70dc42f3beb2 nodeName:}" failed. No retries permitted until 2026-02-03 07:26:50.278920762 +0000 UTC m=+989.260867579 (durationBeforeRetry 4s). 
Feb 03 07:26:45 crc kubenswrapper[4708]: I0203 07:26:45.777025 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-48bcs" event={"ID":"6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024","Type":"ContainerStarted","Data":"5ed217fed3f57dd21b6c6f756f41aedaeb99dee2b25f053e3cf717b2c950974c"}
Feb 03 07:26:46 crc kubenswrapper[4708]: I0203 07:26:46.102511 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00a84ff9-5edc-4e18-a656-0a303d9c734c" path="/var/lib/kubelet/pods/00a84ff9-5edc-4e18-a656-0a303d9c734c/volumes"
Feb 03 07:26:46 crc kubenswrapper[4708]: I0203 07:26:46.278619 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a0593ff7-ba15-46be-8879-70dc42f3beb2-etc-swift\") pod \"swift-storage-0\" (UID: \"a0593ff7-ba15-46be-8879-70dc42f3beb2\") " pod="openstack/swift-storage-0"
Feb 03 07:26:46 crc kubenswrapper[4708]: E0203 07:26:46.278852 4708 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Feb 03 07:26:46 crc kubenswrapper[4708]: E0203 07:26:46.278878 4708 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Feb 03 07:26:46 crc kubenswrapper[4708]: E0203 07:26:46.278939 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a0593ff7-ba15-46be-8879-70dc42f3beb2-etc-swift podName:a0593ff7-ba15-46be-8879-70dc42f3beb2 nodeName:}" failed. No retries permitted until 2026-02-03 07:26:50.278920762 +0000 UTC m=+989.260867579 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a0593ff7-ba15-46be-8879-70dc42f3beb2-etc-swift") pod "swift-storage-0" (UID: "a0593ff7-ba15-46be-8879-70dc42f3beb2") : configmap "swift-ring-files" not found
Feb 03 07:26:47 crc kubenswrapper[4708]: I0203 07:26:47.809388 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-48bcs"
Feb 03 07:26:47 crc kubenswrapper[4708]: I0203 07:26:47.809767 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-48bcs"
Feb 03 07:26:47 crc kubenswrapper[4708]: I0203 07:26:47.885546 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-48bcs" podStartSLOduration=32.664391862 podStartE2EDuration="43.88551855s" podCreationTimestamp="2026-02-03 07:26:04 +0000 UTC" firstStartedPulling="2026-02-03 07:26:14.507809617 +0000 UTC m=+953.489756424" lastFinishedPulling="2026-02-03 07:26:25.728936305 +0000 UTC m=+964.710883112" observedRunningTime="2026-02-03 07:26:47.877026973 +0000 UTC m=+986.858973820" watchObservedRunningTime="2026-02-03 07:26:47.88551855 +0000 UTC m=+986.867465387"
Feb 03 07:26:48 crc kubenswrapper[4708]: I0203 07:26:48.440545 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-gpnt5"
Feb 03 07:26:48 crc kubenswrapper[4708]: I0203 07:26:48.520010 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c44c6867-b4bc-45f2-9100-cc320788a3c0-config\") pod \"c44c6867-b4bc-45f2-9100-cc320788a3c0\" (UID: \"c44c6867-b4bc-45f2-9100-cc320788a3c0\") "
Feb 03 07:26:48 crc kubenswrapper[4708]: I0203 07:26:48.520371 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hr4f6\" (UniqueName: \"kubernetes.io/projected/c44c6867-b4bc-45f2-9100-cc320788a3c0-kube-api-access-hr4f6\") pod \"c44c6867-b4bc-45f2-9100-cc320788a3c0\" (UID: \"c44c6867-b4bc-45f2-9100-cc320788a3c0\") "
Feb 03 07:26:48 crc kubenswrapper[4708]: I0203 07:26:48.520428 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c44c6867-b4bc-45f2-9100-cc320788a3c0-dns-svc\") pod \"c44c6867-b4bc-45f2-9100-cc320788a3c0\" (UID: \"c44c6867-b4bc-45f2-9100-cc320788a3c0\") "
Feb 03 07:26:48 crc kubenswrapper[4708]: I0203 07:26:48.562136 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c44c6867-b4bc-45f2-9100-cc320788a3c0-kube-api-access-hr4f6" (OuterVolumeSpecName: "kube-api-access-hr4f6") pod "c44c6867-b4bc-45f2-9100-cc320788a3c0" (UID: "c44c6867-b4bc-45f2-9100-cc320788a3c0"). InnerVolumeSpecName "kube-api-access-hr4f6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 07:26:48 crc kubenswrapper[4708]: I0203 07:26:48.578610 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c44c6867-b4bc-45f2-9100-cc320788a3c0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c44c6867-b4bc-45f2-9100-cc320788a3c0" (UID: "c44c6867-b4bc-45f2-9100-cc320788a3c0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 03 07:26:48 crc kubenswrapper[4708]: I0203 07:26:48.584244 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c44c6867-b4bc-45f2-9100-cc320788a3c0-config" (OuterVolumeSpecName: "config") pod "c44c6867-b4bc-45f2-9100-cc320788a3c0" (UID: "c44c6867-b4bc-45f2-9100-cc320788a3c0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 03 07:26:48 crc kubenswrapper[4708]: I0203 07:26:48.632837 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hr4f6\" (UniqueName: \"kubernetes.io/projected/c44c6867-b4bc-45f2-9100-cc320788a3c0-kube-api-access-hr4f6\") on node \"crc\" DevicePath \"\""
Feb 03 07:26:48 crc kubenswrapper[4708]: I0203 07:26:48.632873 4708 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c44c6867-b4bc-45f2-9100-cc320788a3c0-dns-svc\") on node \"crc\" DevicePath \"\""
Feb 03 07:26:48 crc kubenswrapper[4708]: I0203 07:26:48.632885 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c44c6867-b4bc-45f2-9100-cc320788a3c0-config\") on node \"crc\" DevicePath \"\""
Feb 03 07:26:48 crc kubenswrapper[4708]: E0203 07:26:48.766886 4708 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified"
Feb 03 07:26:48 crc kubenswrapper[4708]: E0203 07:26:48.767098 4708 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:openstack-network-exporter,Image:quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified,Command:[/app/openstack-network-exporter],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:OPENSTACK_NETWORK_EXPORTER_YAML,Value:/etc/config/openstack-network-exporter.yaml,ValueFrom:nil,},EnvVar{Name:CONFIG_HASH,Value:nf5h7dhd4h584h5cdh68bh697hbchcch5d4h664h5f9h95h57ch689hf9hb7h689h686h549h666h676h5fh688h66fh8h569h56bh5cbh99h95h86q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:ovsdb-rundir,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:metrics-certs-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovnmetrics.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:metrics-certs-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/private/ovnmetrics.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:metrics-certs-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndbca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8hjjx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovsdbserver-nb-0_openstack(6c70c9bb-deb5-45aa-96e6-aea4e711f93a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Feb 03 07:26:48 crc kubenswrapper[4708]: E0203 07:26:48.768395 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstack-network-exporter\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ovsdbserver-nb-0" podUID="6c70c9bb-deb5-45aa-96e6-aea4e711f93a"
Feb 03 07:26:48 crc kubenswrapper[4708]: I0203 07:26:48.820303 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-gpnt5" event={"ID":"c44c6867-b4bc-45f2-9100-cc320788a3c0","Type":"ContainerDied","Data":"2cb623c3e8fd4e358655177877eabdbb14a17788a4cf2008f334d49506b6f8e4"}
Feb 03 07:26:48 crc kubenswrapper[4708]: I0203 07:26:48.820381 4708 scope.go:117] "RemoveContainer" containerID="29244a0cb2acbd6fbf2d932bf2748c3a4eae484d29843039c9e21354e958f7b5"
Feb 03 07:26:48 crc kubenswrapper[4708]: I0203 07:26:48.820516 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-gpnt5"
Feb 03 07:26:48 crc kubenswrapper[4708]: E0203 07:26:48.838435 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstack-network-exporter\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified\\\"\"" pod="openstack/ovsdbserver-nb-0" podUID="6c70c9bb-deb5-45aa-96e6-aea4e711f93a"
Feb 03 07:26:48 crc kubenswrapper[4708]: I0203 07:26:48.873309 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-gpnt5"]
Feb 03 07:26:48 crc kubenswrapper[4708]: I0203 07:26:48.881996 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-gpnt5"]
Feb 03 07:26:48 crc kubenswrapper[4708]: I0203 07:26:48.901373 4708 scope.go:117] "RemoveContainer" containerID="3297b6d569921f53e586543d859b93af3cec53981be07cb89e4dde63179f6387"
Feb 03 07:26:49 crc kubenswrapper[4708]: I0203 07:26:49.104511 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0"
Feb 03 07:26:49 crc kubenswrapper[4708]: I0203 07:26:49.141473 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0"
Feb 03 07:26:49 crc kubenswrapper[4708]: I0203 07:26:49.353239 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-zljlj"]
Feb 03 07:26:49 crc kubenswrapper[4708]: I0203 07:26:49.358664 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-8pqzz"]
Feb 03 07:26:49 crc kubenswrapper[4708]: W0203 07:26:49.358692 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod95221c6e_0d7b_4961_8d71_15134431bac0.slice/crio-f6460754c0310fb49e2fb621d93fcae14bada468e55c9f2581d2c210dc4acb5c WatchSource:0}: Error finding container f6460754c0310fb49e2fb621d93fcae14bada468e55c9f2581d2c210dc4acb5c: Status 404 returned error can't find the container with id f6460754c0310fb49e2fb621d93fcae14bada468e55c9f2581d2c210dc4acb5c
Feb 03 07:26:49 crc kubenswrapper[4708]: W0203 07:26:49.365392 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfdec39a4_6222_4122_901f_4a6603afc348.slice/crio-d6e108b77428f4041a914087f37a87cc499f750be6ef27a6b773dfe696cd67fd WatchSource:0}: Error finding container d6e108b77428f4041a914087f37a87cc499f750be6ef27a6b773dfe696cd67fd: Status 404 returned error can't find the container with id d6e108b77428f4041a914087f37a87cc499f750be6ef27a6b773dfe696cd67fd
Feb 03 07:26:49 crc kubenswrapper[4708]: I0203 07:26:49.840494 4708 generic.go:334] "Generic (PLEG): container finished" podID="95221c6e-0d7b-4961-8d71-15134431bac0" containerID="14994cf9a023cb7781145284f56231857e24519c82dbca7f71d87cdd36217c24" exitCode=0
Feb 03 07:26:49 crc kubenswrapper[4708]: I0203 07:26:49.840597 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-8pqzz" event={"ID":"95221c6e-0d7b-4961-8d71-15134431bac0","Type":"ContainerDied","Data":"14994cf9a023cb7781145284f56231857e24519c82dbca7f71d87cdd36217c24"}
Feb 03 07:26:49 crc kubenswrapper[4708]: I0203 07:26:49.840910 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-8pqzz" event={"ID":"95221c6e-0d7b-4961-8d71-15134431bac0","Type":"ContainerStarted","Data":"f6460754c0310fb49e2fb621d93fcae14bada468e55c9f2581d2c210dc4acb5c"}
Feb 03 07:26:49 crc kubenswrapper[4708]: I0203 07:26:49.843581 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"d09d6786-b507-4848-977f-a5e94b77d0ad","Type":"ContainerStarted","Data":"928e9a8284124db04d542332f6ef14149e72762891177726887ec2498994d949"}
Feb 03 07:26:49 crc kubenswrapper[4708]: I0203 07:26:49.846316 4708 generic.go:334] "Generic (PLEG): container finished" podID="1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40" containerID="dc4e36a05b2beef376f9d3dcdb32a3b72cff6c073a4762ae27b8baceb95eb514" exitCode=0
Feb 03 07:26:49 crc kubenswrapper[4708]: I0203 07:26:49.856594 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-v89mq" event={"ID":"1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40","Type":"ContainerDied","Data":"dc4e36a05b2beef376f9d3dcdb32a3b72cff6c073a4762ae27b8baceb95eb514"}
Feb 03 07:26:49 crc kubenswrapper[4708]: I0203 07:26:49.874409 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4hkfj" event={"ID":"69f5f6ed-0270-407f-9b23-68c954638cb1","Type":"ContainerStarted","Data":"c3ef8d5a550fbd82535c75860d478d404227eeb4d6195519488ff17b704c67b3"}
Feb 03 07:26:49 crc kubenswrapper[4708]: I0203 07:26:49.882171 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ppsk2" event={"ID":"0f490b43-b9f5-4e99-89a1-e7c75a45b487","Type":"ContainerStarted","Data":"53a21fb701c637c26d897889cbe13537de1e20095607a20d3ae5cad0c880fc30"}
Feb 03 07:26:49 crc kubenswrapper[4708]: I0203 07:26:49.886477 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-zljlj" event={"ID":"fdec39a4-6222-4122-901f-4a6603afc348","Type":"ContainerStarted","Data":"d6e108b77428f4041a914087f37a87cc499f750be6ef27a6b773dfe696cd67fd"}
Feb 03 07:26:49 crc kubenswrapper[4708]: I0203 07:26:49.901067 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pxgzt" event={"ID":"271e64f1-20a8-42ad-962a-0b498d561cdd","Type":"ContainerStarted","Data":"cb392ead4fab52dfc293be5e8f9dcca074189cc653525be55e5484146a35e2e6"}
Feb 03 07:26:49 crc kubenswrapper[4708]: I0203 07:26:49.903460 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0"
Feb 03 07:26:49 crc kubenswrapper[4708]: I0203 07:26:49.905750 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0"
Feb 03 07:26:49 crc kubenswrapper[4708]: I0203 07:26:49.963665 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0"
Feb 03 07:26:49 crc kubenswrapper[4708]: I0203 07:26:49.969599 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-ppsk2" podStartSLOduration=17.558873841 podStartE2EDuration="48.969573505s" podCreationTimestamp="2026-02-03 07:26:01 +0000 UTC" firstStartedPulling="2026-02-03 07:26:17.490646142 +0000 UTC m=+956.472592949" lastFinishedPulling="2026-02-03 07:26:48.901345796 +0000 UTC m=+987.883292613" observedRunningTime="2026-02-03 07:26:49.941201171 +0000 UTC m=+988.923147978" watchObservedRunningTime="2026-02-03 07:26:49.969573505 +0000 UTC m=+988.951520312"
Feb 03 07:26:49 crc kubenswrapper[4708]: I0203 07:26:49.978738 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4hkfj" podStartSLOduration=20.773188409 podStartE2EDuration="45.978702669s" podCreationTimestamp="2026-02-03 07:26:04 +0000 UTC" firstStartedPulling="2026-02-03 07:26:23.614008104 +0000 UTC m=+962.595954911" lastFinishedPulling="2026-02-03 07:26:48.819522324 +0000 UTC m=+987.801469171" observedRunningTime="2026-02-03 07:26:49.964906591 +0000 UTC m=+988.946853418" watchObservedRunningTime="2026-02-03 07:26:49.978702669 +0000 UTC m=+988.960649476"
Feb 03 07:26:49 crc kubenswrapper[4708]: I0203 07:26:49.990351 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=12.039539041 podStartE2EDuration="43.990331043s" podCreationTimestamp="2026-02-03 07:26:06 +0000 UTC" firstStartedPulling="2026-02-03 07:26:17.278264187 +0000 UTC m=+956.260210994" lastFinishedPulling="2026-02-03 07:26:49.229056189 +0000 UTC m=+988.211002996" observedRunningTime="2026-02-03 07:26:49.980853531 +0000 UTC m=+988.962800338" watchObservedRunningTime="2026-02-03 07:26:49.990331043 +0000 UTC m=+988.972277850"
Feb 03 07:26:50 crc kubenswrapper[4708]: I0203 07:26:50.015163 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0"
Feb 03 07:26:50 crc kubenswrapper[4708]: I0203 07:26:50.071323 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-pxgzt" podStartSLOduration=26.986307267 podStartE2EDuration="52.071301316s" podCreationTimestamp="2026-02-03 07:25:58 +0000 UTC" firstStartedPulling="2026-02-03 07:26:23.613955103 +0000 UTC m=+962.595901930" lastFinishedPulling="2026-02-03 07:26:48.698949132 +0000 UTC m=+987.680895979" observedRunningTime="2026-02-03 07:26:50.060325347 +0000 UTC m=+989.042272174" watchObservedRunningTime="2026-02-03 07:26:50.071301316 +0000 UTC m=+989.053248123"
path="/var/lib/kubelet/pods/c44c6867-b4bc-45f2-9100-cc320788a3c0/volumes" Feb 03 07:26:50 crc kubenswrapper[4708]: I0203 07:26:50.279679 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a0593ff7-ba15-46be-8879-70dc42f3beb2-etc-swift\") pod \"swift-storage-0\" (UID: \"a0593ff7-ba15-46be-8879-70dc42f3beb2\") " pod="openstack/swift-storage-0" Feb 03 07:26:50 crc kubenswrapper[4708]: E0203 07:26:50.279961 4708 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 03 07:26:50 crc kubenswrapper[4708]: E0203 07:26:50.280010 4708 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 03 07:26:50 crc kubenswrapper[4708]: E0203 07:26:50.280074 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a0593ff7-ba15-46be-8879-70dc42f3beb2-etc-swift podName:a0593ff7-ba15-46be-8879-70dc42f3beb2 nodeName:}" failed. No retries permitted until 2026-02-03 07:26:58.280056966 +0000 UTC m=+997.262003773 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a0593ff7-ba15-46be-8879-70dc42f3beb2-etc-swift") pod "swift-storage-0" (UID: "a0593ff7-ba15-46be-8879-70dc42f3beb2") : configmap "swift-ring-files" not found Feb 03 07:26:50 crc kubenswrapper[4708]: I0203 07:26:50.921877 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-v89mq" event={"ID":"1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40","Type":"ContainerStarted","Data":"b715b4746ea1418bf39b68684e3d53da66dc508f02ca3216437f915911c79491"} Feb 03 07:26:50 crc kubenswrapper[4708]: I0203 07:26:50.929644 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"6c70c9bb-deb5-45aa-96e6-aea4e711f93a","Type":"ContainerStarted","Data":"77bc2f1459d9b95d7aa865b2f98e2b9a6a44f1a1a2683224f7f768c26ad895a8"} Feb 03 07:26:50 crc kubenswrapper[4708]: I0203 07:26:50.934145 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-8pqzz" event={"ID":"95221c6e-0d7b-4961-8d71-15134431bac0","Type":"ContainerStarted","Data":"a6fc7ab0a9b72591bc628a0dd921b56e9943d50243299060aee4967d22aab7fe"} Feb 03 07:26:50 crc kubenswrapper[4708]: I0203 07:26:50.934181 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7cb5889db5-8pqzz" Feb 03 07:26:50 crc kubenswrapper[4708]: I0203 07:26:50.935040 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Feb 03 07:26:50 crc kubenswrapper[4708]: I0203 07:26:50.975290 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=32.136764686 podStartE2EDuration="45.975270834s" podCreationTimestamp="2026-02-03 07:26:05 +0000 UTC" firstStartedPulling="2026-02-03 07:26:14.507621613 +0000 UTC m=+953.489568420" lastFinishedPulling="2026-02-03 07:26:28.346127761 +0000 UTC m=+967.328074568" observedRunningTime="2026-02-03 07:26:50.954630199 +0000 UTC m=+989.936577006" watchObservedRunningTime="2026-02-03 07:26:50.975270834 +0000 UTC m=+989.957217661" Feb 03 07:26:50 crc kubenswrapper[4708]: I0203 07:26:50.982390 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7cb5889db5-8pqzz" podStartSLOduration=9.982345947 
podStartE2EDuration="9.982345947s" podCreationTimestamp="2026-02-03 07:26:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:26:50.971906221 +0000 UTC m=+989.953853028" watchObservedRunningTime="2026-02-03 07:26:50.982345947 +0000 UTC m=+989.964292754" Feb 03 07:26:50 crc kubenswrapper[4708]: I0203 07:26:50.987379 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.138542 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-8pqzz"] Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.178248 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-xxk4z"] Feb 03 07:26:51 crc kubenswrapper[4708]: E0203 07:26:51.178657 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c44c6867-b4bc-45f2-9100-cc320788a3c0" containerName="dnsmasq-dns" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.178679 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="c44c6867-b4bc-45f2-9100-cc320788a3c0" containerName="dnsmasq-dns" Feb 03 07:26:51 crc kubenswrapper[4708]: E0203 07:26:51.178707 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c44c6867-b4bc-45f2-9100-cc320788a3c0" containerName="init" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.178714 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="c44c6867-b4bc-45f2-9100-cc320788a3c0" containerName="init" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.178932 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="c44c6867-b4bc-45f2-9100-cc320788a3c0" containerName="dnsmasq-dns" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.183155 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f6f696b9-xxk4z" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.186428 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.195470 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-xxk4z"] Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.291448 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-44cl7"] Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.292922 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-44cl7" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.295507 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.307774 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9r6rk\" (UniqueName: \"kubernetes.io/projected/2ea30931-3f76-46f9-a2c8-52cbcd57411d-kube-api-access-9r6rk\") pod \"dnsmasq-dns-74f6f696b9-xxk4z\" (UID: \"2ea30931-3f76-46f9-a2c8-52cbcd57411d\") " pod="openstack/dnsmasq-dns-74f6f696b9-xxk4z" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.307835 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2ea30931-3f76-46f9-a2c8-52cbcd57411d-dns-svc\") pod \"dnsmasq-dns-74f6f696b9-xxk4z\" (UID: \"2ea30931-3f76-46f9-a2c8-52cbcd57411d\") " pod="openstack/dnsmasq-dns-74f6f696b9-xxk4z" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.307858 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2ea30931-3f76-46f9-a2c8-52cbcd57411d-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6f696b9-xxk4z\" (UID: \"2ea30931-3f76-46f9-a2c8-52cbcd57411d\") " pod="openstack/dnsmasq-dns-74f6f696b9-xxk4z" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.307909 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ea30931-3f76-46f9-a2c8-52cbcd57411d-config\") pod \"dnsmasq-dns-74f6f696b9-xxk4z\" (UID: \"2ea30931-3f76-46f9-a2c8-52cbcd57411d\") " pod="openstack/dnsmasq-dns-74f6f696b9-xxk4z" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.337427 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-44cl7"] Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.381298 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-xxk4z"] Feb 03 07:26:51 crc kubenswrapper[4708]: E0203 07:26:51.381960 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-svc kube-api-access-9r6rk ovsdbserver-nb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-74f6f696b9-xxk4z" podUID="2ea30931-3f76-46f9-a2c8-52cbcd57411d" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.404896 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-85cjv"] Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.406637 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-85cjv" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.412142 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.413677 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2ea30931-3f76-46f9-a2c8-52cbcd57411d-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6f696b9-xxk4z\" (UID: \"2ea30931-3f76-46f9-a2c8-52cbcd57411d\") " pod="openstack/dnsmasq-dns-74f6f696b9-xxk4z" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.413761 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ea30931-3f76-46f9-a2c8-52cbcd57411d-config\") pod \"dnsmasq-dns-74f6f696b9-xxk4z\" (UID: \"2ea30931-3f76-46f9-a2c8-52cbcd57411d\") " pod="openstack/dnsmasq-dns-74f6f696b9-xxk4z" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.413804 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8stm\" (UniqueName: \"kubernetes.io/projected/2acc0e69-490c-4b5c-8486-bf0fd3fb6316-kube-api-access-t8stm\") pod \"ovn-controller-metrics-44cl7\" (UID: \"2acc0e69-490c-4b5c-8486-bf0fd3fb6316\") " pod="openstack/ovn-controller-metrics-44cl7" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.413844 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2acc0e69-490c-4b5c-8486-bf0fd3fb6316-config\") pod \"ovn-controller-metrics-44cl7\" (UID: \"2acc0e69-490c-4b5c-8486-bf0fd3fb6316\") " pod="openstack/ovn-controller-metrics-44cl7" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.413877 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/2acc0e69-490c-4b5c-8486-bf0fd3fb6316-ovn-rundir\") pod \"ovn-controller-metrics-44cl7\" (UID: \"2acc0e69-490c-4b5c-8486-bf0fd3fb6316\") " pod="openstack/ovn-controller-metrics-44cl7" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.413905 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2acc0e69-490c-4b5c-8486-bf0fd3fb6316-combined-ca-bundle\") pod \"ovn-controller-metrics-44cl7\" (UID: \"2acc0e69-490c-4b5c-8486-bf0fd3fb6316\") " pod="openstack/ovn-controller-metrics-44cl7" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.413942 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/2acc0e69-490c-4b5c-8486-bf0fd3fb6316-ovs-rundir\") pod \"ovn-controller-metrics-44cl7\" (UID: \"2acc0e69-490c-4b5c-8486-bf0fd3fb6316\") " pod="openstack/ovn-controller-metrics-44cl7" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.413967 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2acc0e69-490c-4b5c-8486-bf0fd3fb6316-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-44cl7\" (UID: \"2acc0e69-490c-4b5c-8486-bf0fd3fb6316\") " pod="openstack/ovn-controller-metrics-44cl7" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.414060 4708 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-9r6rk\" (UniqueName: \"kubernetes.io/projected/2ea30931-3f76-46f9-a2c8-52cbcd57411d-kube-api-access-9r6rk\") pod \"dnsmasq-dns-74f6f696b9-xxk4z\" (UID: \"2ea30931-3f76-46f9-a2c8-52cbcd57411d\") " pod="openstack/dnsmasq-dns-74f6f696b9-xxk4z" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.414086 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2ea30931-3f76-46f9-a2c8-52cbcd57411d-dns-svc\") pod \"dnsmasq-dns-74f6f696b9-xxk4z\" (UID: \"2ea30931-3f76-46f9-a2c8-52cbcd57411d\") " pod="openstack/dnsmasq-dns-74f6f696b9-xxk4z" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.414941 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2ea30931-3f76-46f9-a2c8-52cbcd57411d-dns-svc\") pod \"dnsmasq-dns-74f6f696b9-xxk4z\" (UID: \"2ea30931-3f76-46f9-a2c8-52cbcd57411d\") " pod="openstack/dnsmasq-dns-74f6f696b9-xxk4z" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.415508 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ea30931-3f76-46f9-a2c8-52cbcd57411d-config\") pod \"dnsmasq-dns-74f6f696b9-xxk4z\" (UID: \"2ea30931-3f76-46f9-a2c8-52cbcd57411d\") " pod="openstack/dnsmasq-dns-74f6f696b9-xxk4z" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.415517 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2ea30931-3f76-46f9-a2c8-52cbcd57411d-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6f696b9-xxk4z\" (UID: \"2ea30931-3f76-46f9-a2c8-52cbcd57411d\") " pod="openstack/dnsmasq-dns-74f6f696b9-xxk4z" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.431719 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-85cjv"] Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.480844 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9r6rk\" (UniqueName: \"kubernetes.io/projected/2ea30931-3f76-46f9-a2c8-52cbcd57411d-kube-api-access-9r6rk\") pod \"dnsmasq-dns-74f6f696b9-xxk4z\" (UID: \"2ea30931-3f76-46f9-a2c8-52cbcd57411d\") " pod="openstack/dnsmasq-dns-74f6f696b9-xxk4z" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.517717 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/412c823d-20ba-42cb-9c05-70bee2ee89a2-dns-svc\") pod \"dnsmasq-dns-698758b865-85cjv\" (UID: \"412c823d-20ba-42cb-9c05-70bee2ee89a2\") " pod="openstack/dnsmasq-dns-698758b865-85cjv" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.517799 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8stm\" (UniqueName: \"kubernetes.io/projected/2acc0e69-490c-4b5c-8486-bf0fd3fb6316-kube-api-access-t8stm\") pod \"ovn-controller-metrics-44cl7\" (UID: \"2acc0e69-490c-4b5c-8486-bf0fd3fb6316\") " pod="openstack/ovn-controller-metrics-44cl7" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.517832 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2acc0e69-490c-4b5c-8486-bf0fd3fb6316-config\") pod \"ovn-controller-metrics-44cl7\" (UID: \"2acc0e69-490c-4b5c-8486-bf0fd3fb6316\") " pod="openstack/ovn-controller-metrics-44cl7" Feb 03 07:26:51 crc 
kubenswrapper[4708]: I0203 07:26:51.517849 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/2acc0e69-490c-4b5c-8486-bf0fd3fb6316-ovn-rundir\") pod \"ovn-controller-metrics-44cl7\" (UID: \"2acc0e69-490c-4b5c-8486-bf0fd3fb6316\") " pod="openstack/ovn-controller-metrics-44cl7" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.517867 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2acc0e69-490c-4b5c-8486-bf0fd3fb6316-combined-ca-bundle\") pod \"ovn-controller-metrics-44cl7\" (UID: \"2acc0e69-490c-4b5c-8486-bf0fd3fb6316\") " pod="openstack/ovn-controller-metrics-44cl7" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.517897 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/2acc0e69-490c-4b5c-8486-bf0fd3fb6316-ovs-rundir\") pod \"ovn-controller-metrics-44cl7\" (UID: \"2acc0e69-490c-4b5c-8486-bf0fd3fb6316\") " pod="openstack/ovn-controller-metrics-44cl7" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.517915 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2acc0e69-490c-4b5c-8486-bf0fd3fb6316-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-44cl7\" (UID: \"2acc0e69-490c-4b5c-8486-bf0fd3fb6316\") " pod="openstack/ovn-controller-metrics-44cl7" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.517935 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/412c823d-20ba-42cb-9c05-70bee2ee89a2-config\") pod \"dnsmasq-dns-698758b865-85cjv\" (UID: \"412c823d-20ba-42cb-9c05-70bee2ee89a2\") " pod="openstack/dnsmasq-dns-698758b865-85cjv" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.517982 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/412c823d-20ba-42cb-9c05-70bee2ee89a2-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-85cjv\" (UID: \"412c823d-20ba-42cb-9c05-70bee2ee89a2\") " pod="openstack/dnsmasq-dns-698758b865-85cjv" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.518015 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/412c823d-20ba-42cb-9c05-70bee2ee89a2-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-85cjv\" (UID: \"412c823d-20ba-42cb-9c05-70bee2ee89a2\") " pod="openstack/dnsmasq-dns-698758b865-85cjv" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.518038 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hbjtm\" (UniqueName: \"kubernetes.io/projected/412c823d-20ba-42cb-9c05-70bee2ee89a2-kube-api-access-hbjtm\") pod \"dnsmasq-dns-698758b865-85cjv\" (UID: \"412c823d-20ba-42cb-9c05-70bee2ee89a2\") " pod="openstack/dnsmasq-dns-698758b865-85cjv" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.519044 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2acc0e69-490c-4b5c-8486-bf0fd3fb6316-config\") pod \"ovn-controller-metrics-44cl7\" (UID: \"2acc0e69-490c-4b5c-8486-bf0fd3fb6316\") " pod="openstack/ovn-controller-metrics-44cl7" Feb 03 07:26:51 crc 
kubenswrapper[4708]: I0203 07:26:51.519293 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/2acc0e69-490c-4b5c-8486-bf0fd3fb6316-ovn-rundir\") pod \"ovn-controller-metrics-44cl7\" (UID: \"2acc0e69-490c-4b5c-8486-bf0fd3fb6316\") " pod="openstack/ovn-controller-metrics-44cl7" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.519760 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/2acc0e69-490c-4b5c-8486-bf0fd3fb6316-ovs-rundir\") pod \"ovn-controller-metrics-44cl7\" (UID: \"2acc0e69-490c-4b5c-8486-bf0fd3fb6316\") " pod="openstack/ovn-controller-metrics-44cl7" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.529496 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2acc0e69-490c-4b5c-8486-bf0fd3fb6316-combined-ca-bundle\") pod \"ovn-controller-metrics-44cl7\" (UID: \"2acc0e69-490c-4b5c-8486-bf0fd3fb6316\") " pod="openstack/ovn-controller-metrics-44cl7" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.531485 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2acc0e69-490c-4b5c-8486-bf0fd3fb6316-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-44cl7\" (UID: \"2acc0e69-490c-4b5c-8486-bf0fd3fb6316\") " pod="openstack/ovn-controller-metrics-44cl7" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.580400 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8stm\" (UniqueName: \"kubernetes.io/projected/2acc0e69-490c-4b5c-8486-bf0fd3fb6316-kube-api-access-t8stm\") pod \"ovn-controller-metrics-44cl7\" (UID: \"2acc0e69-490c-4b5c-8486-bf0fd3fb6316\") " pod="openstack/ovn-controller-metrics-44cl7" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.614599 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-44cl7" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.623299 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/412c823d-20ba-42cb-9c05-70bee2ee89a2-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-85cjv\" (UID: \"412c823d-20ba-42cb-9c05-70bee2ee89a2\") " pod="openstack/dnsmasq-dns-698758b865-85cjv" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.623374 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/412c823d-20ba-42cb-9c05-70bee2ee89a2-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-85cjv\" (UID: \"412c823d-20ba-42cb-9c05-70bee2ee89a2\") " pod="openstack/dnsmasq-dns-698758b865-85cjv" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.623401 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hbjtm\" (UniqueName: \"kubernetes.io/projected/412c823d-20ba-42cb-9c05-70bee2ee89a2-kube-api-access-hbjtm\") pod \"dnsmasq-dns-698758b865-85cjv\" (UID: \"412c823d-20ba-42cb-9c05-70bee2ee89a2\") " pod="openstack/dnsmasq-dns-698758b865-85cjv" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.623447 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/412c823d-20ba-42cb-9c05-70bee2ee89a2-dns-svc\") pod \"dnsmasq-dns-698758b865-85cjv\" (UID: \"412c823d-20ba-42cb-9c05-70bee2ee89a2\") " pod="openstack/dnsmasq-dns-698758b865-85cjv" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.623527 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/412c823d-20ba-42cb-9c05-70bee2ee89a2-config\") pod \"dnsmasq-dns-698758b865-85cjv\" (UID: \"412c823d-20ba-42cb-9c05-70bee2ee89a2\") " pod="openstack/dnsmasq-dns-698758b865-85cjv" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.627143 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/412c823d-20ba-42cb-9c05-70bee2ee89a2-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-85cjv\" (UID: \"412c823d-20ba-42cb-9c05-70bee2ee89a2\") " pod="openstack/dnsmasq-dns-698758b865-85cjv" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.646431 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/412c823d-20ba-42cb-9c05-70bee2ee89a2-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-85cjv\" (UID: \"412c823d-20ba-42cb-9c05-70bee2ee89a2\") " pod="openstack/dnsmasq-dns-698758b865-85cjv" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.646432 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/412c823d-20ba-42cb-9c05-70bee2ee89a2-config\") pod \"dnsmasq-dns-698758b865-85cjv\" (UID: \"412c823d-20ba-42cb-9c05-70bee2ee89a2\") " pod="openstack/dnsmasq-dns-698758b865-85cjv" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.648388 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/412c823d-20ba-42cb-9c05-70bee2ee89a2-dns-svc\") pod \"dnsmasq-dns-698758b865-85cjv\" (UID: \"412c823d-20ba-42cb-9c05-70bee2ee89a2\") " pod="openstack/dnsmasq-dns-698758b865-85cjv" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 
07:26:51.684885 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hbjtm\" (UniqueName: \"kubernetes.io/projected/412c823d-20ba-42cb-9c05-70bee2ee89a2-kube-api-access-hbjtm\") pod \"dnsmasq-dns-698758b865-85cjv\" (UID: \"412c823d-20ba-42cb-9c05-70bee2ee89a2\") " pod="openstack/dnsmasq-dns-698758b865-85cjv" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.710890 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.712868 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.722144 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.722183 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.722392 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-vwhfq" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.722507 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.749868 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.753843 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-85cjv" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.827881 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/d1a2b7e5-23d7-48f6-b144-d575da1e613d-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"d1a2b7e5-23d7-48f6-b144-d575da1e613d\") " pod="openstack/ovn-northd-0" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.828185 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d1a2b7e5-23d7-48f6-b144-d575da1e613d-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"d1a2b7e5-23d7-48f6-b144-d575da1e613d\") " pod="openstack/ovn-northd-0" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.828208 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ck86g\" (UniqueName: \"kubernetes.io/projected/d1a2b7e5-23d7-48f6-b144-d575da1e613d-kube-api-access-ck86g\") pod \"ovn-northd-0\" (UID: \"d1a2b7e5-23d7-48f6-b144-d575da1e613d\") " pod="openstack/ovn-northd-0" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.828233 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1a2b7e5-23d7-48f6-b144-d575da1e613d-config\") pod \"ovn-northd-0\" (UID: \"d1a2b7e5-23d7-48f6-b144-d575da1e613d\") " pod="openstack/ovn-northd-0" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.828266 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1a2b7e5-23d7-48f6-b144-d575da1e613d-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"d1a2b7e5-23d7-48f6-b144-d575da1e613d\") " pod="openstack/ovn-northd-0" 
Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.828341 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d1a2b7e5-23d7-48f6-b144-d575da1e613d-scripts\") pod \"ovn-northd-0\" (UID: \"d1a2b7e5-23d7-48f6-b144-d575da1e613d\") " pod="openstack/ovn-northd-0" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.828471 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d1a2b7e5-23d7-48f6-b144-d575da1e613d-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"d1a2b7e5-23d7-48f6-b144-d575da1e613d\") " pod="openstack/ovn-northd-0" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.929773 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/d1a2b7e5-23d7-48f6-b144-d575da1e613d-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"d1a2b7e5-23d7-48f6-b144-d575da1e613d\") " pod="openstack/ovn-northd-0" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.929854 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d1a2b7e5-23d7-48f6-b144-d575da1e613d-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"d1a2b7e5-23d7-48f6-b144-d575da1e613d\") " pod="openstack/ovn-northd-0" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.929885 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ck86g\" (UniqueName: \"kubernetes.io/projected/d1a2b7e5-23d7-48f6-b144-d575da1e613d-kube-api-access-ck86g\") pod \"ovn-northd-0\" (UID: \"d1a2b7e5-23d7-48f6-b144-d575da1e613d\") " pod="openstack/ovn-northd-0" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.929921 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1a2b7e5-23d7-48f6-b144-d575da1e613d-config\") pod \"ovn-northd-0\" (UID: \"d1a2b7e5-23d7-48f6-b144-d575da1e613d\") " pod="openstack/ovn-northd-0" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.929970 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1a2b7e5-23d7-48f6-b144-d575da1e613d-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"d1a2b7e5-23d7-48f6-b144-d575da1e613d\") " pod="openstack/ovn-northd-0" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.930046 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d1a2b7e5-23d7-48f6-b144-d575da1e613d-scripts\") pod \"ovn-northd-0\" (UID: \"d1a2b7e5-23d7-48f6-b144-d575da1e613d\") " pod="openstack/ovn-northd-0" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.930082 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d1a2b7e5-23d7-48f6-b144-d575da1e613d-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"d1a2b7e5-23d7-48f6-b144-d575da1e613d\") " pod="openstack/ovn-northd-0" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.930438 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d1a2b7e5-23d7-48f6-b144-d575da1e613d-ovn-rundir\") pod \"ovn-northd-0\" (UID: 
\"d1a2b7e5-23d7-48f6-b144-d575da1e613d\") " pod="openstack/ovn-northd-0" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.931241 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1a2b7e5-23d7-48f6-b144-d575da1e613d-config\") pod \"ovn-northd-0\" (UID: \"d1a2b7e5-23d7-48f6-b144-d575da1e613d\") " pod="openstack/ovn-northd-0" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.932044 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d1a2b7e5-23d7-48f6-b144-d575da1e613d-scripts\") pod \"ovn-northd-0\" (UID: \"d1a2b7e5-23d7-48f6-b144-d575da1e613d\") " pod="openstack/ovn-northd-0" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.933763 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1a2b7e5-23d7-48f6-b144-d575da1e613d-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"d1a2b7e5-23d7-48f6-b144-d575da1e613d\") " pod="openstack/ovn-northd-0" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.936072 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d1a2b7e5-23d7-48f6-b144-d575da1e613d-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"d1a2b7e5-23d7-48f6-b144-d575da1e613d\") " pod="openstack/ovn-northd-0" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.936396 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/d1a2b7e5-23d7-48f6-b144-d575da1e613d-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"d1a2b7e5-23d7-48f6-b144-d575da1e613d\") " pod="openstack/ovn-northd-0" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.944841 4708 generic.go:334] "Generic (PLEG): container finished" podID="5f6fa285-4374-4be5-b4cf-e3dd8ef56762" containerID="5b6699250aa364eb8d431e1f4f7f2fee80ae0ad649740142c9a53e75894ce753" exitCode=0 Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.944933 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"5f6fa285-4374-4be5-b4cf-e3dd8ef56762","Type":"ContainerDied","Data":"5b6699250aa364eb8d431e1f4f7f2fee80ae0ad649740142c9a53e75894ce753"} Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.948300 4708 generic.go:334] "Generic (PLEG): container finished" podID="8484d145-abd4-4112-b81c-338bf4d9285f" containerID="688ee5050169079715ea82cb5b9709b12774727cb5c229d24a225ed756c8b252" exitCode=0 Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.948451 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"8484d145-abd4-4112-b81c-338bf4d9285f","Type":"ContainerDied","Data":"688ee5050169079715ea82cb5b9709b12774727cb5c229d24a225ed756c8b252"} Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.948987 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ck86g\" (UniqueName: \"kubernetes.io/projected/d1a2b7e5-23d7-48f6-b144-d575da1e613d-kube-api-access-ck86g\") pod \"ovn-northd-0\" (UID: \"d1a2b7e5-23d7-48f6-b144-d575da1e613d\") " pod="openstack/ovn-northd-0" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.949144 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74f6f696b9-xxk4z" Feb 03 07:26:51 crc kubenswrapper[4708]: I0203 07:26:51.958979 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f6f696b9-xxk4z" Feb 03 07:26:52 crc kubenswrapper[4708]: I0203 07:26:52.021971 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-v89mq" podStartSLOduration=14.935264989 podStartE2EDuration="36.021948755s" podCreationTimestamp="2026-02-03 07:26:16 +0000 UTC" firstStartedPulling="2026-02-03 07:26:29.570480971 +0000 UTC m=+968.552427788" lastFinishedPulling="2026-02-03 07:26:50.657164747 +0000 UTC m=+989.639111554" observedRunningTime="2026-02-03 07:26:52.013852017 +0000 UTC m=+990.995798824" watchObservedRunningTime="2026-02-03 07:26:52.021948755 +0000 UTC m=+991.003895552" Feb 03 07:26:52 crc kubenswrapper[4708]: I0203 07:26:52.047375 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Feb 03 07:26:52 crc kubenswrapper[4708]: I0203 07:26:52.055586 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-ppsk2" Feb 03 07:26:52 crc kubenswrapper[4708]: I0203 07:26:52.055628 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-ppsk2" Feb 03 07:26:52 crc kubenswrapper[4708]: I0203 07:26:52.125747 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-ppsk2" Feb 03 07:26:52 crc kubenswrapper[4708]: I0203 07:26:52.134618 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9r6rk\" (UniqueName: \"kubernetes.io/projected/2ea30931-3f76-46f9-a2c8-52cbcd57411d-kube-api-access-9r6rk\") pod \"2ea30931-3f76-46f9-a2c8-52cbcd57411d\" (UID: \"2ea30931-3f76-46f9-a2c8-52cbcd57411d\") " Feb 03 07:26:52 crc kubenswrapper[4708]: I0203 07:26:52.134689 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2ea30931-3f76-46f9-a2c8-52cbcd57411d-ovsdbserver-nb\") pod \"2ea30931-3f76-46f9-a2c8-52cbcd57411d\" (UID: \"2ea30931-3f76-46f9-a2c8-52cbcd57411d\") " Feb 03 07:26:52 crc kubenswrapper[4708]: I0203 07:26:52.134767 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ea30931-3f76-46f9-a2c8-52cbcd57411d-config\") pod \"2ea30931-3f76-46f9-a2c8-52cbcd57411d\" (UID: \"2ea30931-3f76-46f9-a2c8-52cbcd57411d\") " Feb 03 07:26:52 crc kubenswrapper[4708]: I0203 07:26:52.134850 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2ea30931-3f76-46f9-a2c8-52cbcd57411d-dns-svc\") pod \"2ea30931-3f76-46f9-a2c8-52cbcd57411d\" (UID: \"2ea30931-3f76-46f9-a2c8-52cbcd57411d\") " Feb 03 07:26:52 crc kubenswrapper[4708]: I0203 07:26:52.138352 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ea30931-3f76-46f9-a2c8-52cbcd57411d-kube-api-access-9r6rk" (OuterVolumeSpecName: "kube-api-access-9r6rk") pod "2ea30931-3f76-46f9-a2c8-52cbcd57411d" (UID: "2ea30931-3f76-46f9-a2c8-52cbcd57411d"). InnerVolumeSpecName "kube-api-access-9r6rk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:26:52 crc kubenswrapper[4708]: I0203 07:26:52.138754 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ea30931-3f76-46f9-a2c8-52cbcd57411d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2ea30931-3f76-46f9-a2c8-52cbcd57411d" (UID: "2ea30931-3f76-46f9-a2c8-52cbcd57411d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:26:52 crc kubenswrapper[4708]: I0203 07:26:52.140234 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ea30931-3f76-46f9-a2c8-52cbcd57411d-config" (OuterVolumeSpecName: "config") pod "2ea30931-3f76-46f9-a2c8-52cbcd57411d" (UID: "2ea30931-3f76-46f9-a2c8-52cbcd57411d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:26:52 crc kubenswrapper[4708]: I0203 07:26:52.140762 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ea30931-3f76-46f9-a2c8-52cbcd57411d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2ea30931-3f76-46f9-a2c8-52cbcd57411d" (UID: "2ea30931-3f76-46f9-a2c8-52cbcd57411d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:26:52 crc kubenswrapper[4708]: I0203 07:26:52.237213 4708 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2ea30931-3f76-46f9-a2c8-52cbcd57411d-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 07:26:52 crc kubenswrapper[4708]: I0203 07:26:52.237246 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9r6rk\" (UniqueName: \"kubernetes.io/projected/2ea30931-3f76-46f9-a2c8-52cbcd57411d-kube-api-access-9r6rk\") on node \"crc\" DevicePath \"\"" Feb 03 07:26:52 crc kubenswrapper[4708]: I0203 07:26:52.237256 4708 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2ea30931-3f76-46f9-a2c8-52cbcd57411d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 03 07:26:52 crc kubenswrapper[4708]: I0203 07:26:52.237265 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ea30931-3f76-46f9-a2c8-52cbcd57411d-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:26:52 crc kubenswrapper[4708]: I0203 07:26:52.955798 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7cb5889db5-8pqzz" podUID="95221c6e-0d7b-4961-8d71-15134431bac0" containerName="dnsmasq-dns" containerID="cri-o://a6fc7ab0a9b72591bc628a0dd921b56e9943d50243299060aee4967d22aab7fe" gracePeriod=10 Feb 03 07:26:52 crc kubenswrapper[4708]: I0203 07:26:52.955737 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74f6f696b9-xxk4z" Feb 03 07:26:53 crc kubenswrapper[4708]: I0203 07:26:53.021304 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-xxk4z"] Feb 03 07:26:53 crc kubenswrapper[4708]: I0203 07:26:53.043041 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-xxk4z"] Feb 03 07:26:53 crc kubenswrapper[4708]: I0203 07:26:53.832995 4708 patch_prober.go:28] interesting pod/machine-config-daemon-r94bn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:26:53 crc kubenswrapper[4708]: I0203 07:26:53.833047 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:26:53 crc kubenswrapper[4708]: I0203 07:26:53.964390 4708 generic.go:334] "Generic (PLEG): container finished" podID="95221c6e-0d7b-4961-8d71-15134431bac0" containerID="a6fc7ab0a9b72591bc628a0dd921b56e9943d50243299060aee4967d22aab7fe" exitCode=0 Feb 03 07:26:53 crc kubenswrapper[4708]: I0203 07:26:53.964439 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-8pqzz" event={"ID":"95221c6e-0d7b-4961-8d71-15134431bac0","Type":"ContainerDied","Data":"a6fc7ab0a9b72591bc628a0dd921b56e9943d50243299060aee4967d22aab7fe"} Feb 03 07:26:54 crc kubenswrapper[4708]: I0203 07:26:54.103775 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ea30931-3f76-46f9-a2c8-52cbcd57411d" path="/var/lib/kubelet/pods/2ea30931-3f76-46f9-a2c8-52cbcd57411d/volumes" Feb 03 07:26:55 crc kubenswrapper[4708]: I0203 07:26:55.053824 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4hkfj" Feb 03 07:26:55 crc kubenswrapper[4708]: I0203 07:26:55.054788 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4hkfj" Feb 03 07:26:55 crc kubenswrapper[4708]: I0203 07:26:55.117520 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4hkfj" Feb 03 07:26:55 crc kubenswrapper[4708]: I0203 07:26:55.426446 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-8pqzz" Feb 03 07:26:55 crc kubenswrapper[4708]: I0203 07:26:55.571665 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-85cjv"] Feb 03 07:26:55 crc kubenswrapper[4708]: W0203 07:26:55.575169 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod412c823d_20ba_42cb_9c05_70bee2ee89a2.slice/crio-3ed74be5574684387540bf6c9e2a8f152e859d2bef3cbcfcc6f4e6a4bde1a517 WatchSource:0}: Error finding container 3ed74be5574684387540bf6c9e2a8f152e859d2bef3cbcfcc6f4e6a4bde1a517: Status 404 returned error can't find the container with id 3ed74be5574684387540bf6c9e2a8f152e859d2bef3cbcfcc6f4e6a4bde1a517 Feb 03 07:26:55 crc kubenswrapper[4708]: I0203 07:26:55.598441 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2gr8g\" (UniqueName: \"kubernetes.io/projected/95221c6e-0d7b-4961-8d71-15134431bac0-kube-api-access-2gr8g\") pod \"95221c6e-0d7b-4961-8d71-15134431bac0\" (UID: \"95221c6e-0d7b-4961-8d71-15134431bac0\") " Feb 03 07:26:55 crc kubenswrapper[4708]: I0203 07:26:55.598585 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95221c6e-0d7b-4961-8d71-15134431bac0-config\") pod \"95221c6e-0d7b-4961-8d71-15134431bac0\" (UID: \"95221c6e-0d7b-4961-8d71-15134431bac0\") " Feb 03 07:26:55 crc kubenswrapper[4708]: I0203 07:26:55.598629 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/95221c6e-0d7b-4961-8d71-15134431bac0-dns-svc\") pod \"95221c6e-0d7b-4961-8d71-15134431bac0\" (UID: \"95221c6e-0d7b-4961-8d71-15134431bac0\") " Feb 03 07:26:55 crc kubenswrapper[4708]: I0203 07:26:55.610128 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95221c6e-0d7b-4961-8d71-15134431bac0-kube-api-access-2gr8g" (OuterVolumeSpecName: "kube-api-access-2gr8g") pod "95221c6e-0d7b-4961-8d71-15134431bac0" (UID: "95221c6e-0d7b-4961-8d71-15134431bac0"). InnerVolumeSpecName "kube-api-access-2gr8g". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:26:55 crc kubenswrapper[4708]: I0203 07:26:55.614637 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Feb 03 07:26:55 crc kubenswrapper[4708]: I0203 07:26:55.657462 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95221c6e-0d7b-4961-8d71-15134431bac0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "95221c6e-0d7b-4961-8d71-15134431bac0" (UID: "95221c6e-0d7b-4961-8d71-15134431bac0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:26:55 crc kubenswrapper[4708]: I0203 07:26:55.658196 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95221c6e-0d7b-4961-8d71-15134431bac0-config" (OuterVolumeSpecName: "config") pod "95221c6e-0d7b-4961-8d71-15134431bac0" (UID: "95221c6e-0d7b-4961-8d71-15134431bac0"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:26:55 crc kubenswrapper[4708]: I0203 07:26:55.701421 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2gr8g\" (UniqueName: \"kubernetes.io/projected/95221c6e-0d7b-4961-8d71-15134431bac0-kube-api-access-2gr8g\") on node \"crc\" DevicePath \"\"" Feb 03 07:26:55 crc kubenswrapper[4708]: I0203 07:26:55.701443 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95221c6e-0d7b-4961-8d71-15134431bac0-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:26:55 crc kubenswrapper[4708]: I0203 07:26:55.701454 4708 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/95221c6e-0d7b-4961-8d71-15134431bac0-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 07:26:55 crc kubenswrapper[4708]: I0203 07:26:55.871891 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-44cl7"] Feb 03 07:26:55 crc kubenswrapper[4708]: W0203 07:26:55.876546 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2acc0e69_490c_4b5c_8486_bf0fd3fb6316.slice/crio-c2cdb0073aeeacaf3e25d805c358f1f20cdfbe43745be504deb79e7e3b924051 WatchSource:0}: Error finding container c2cdb0073aeeacaf3e25d805c358f1f20cdfbe43745be504deb79e7e3b924051: Status 404 returned error can't find the container with id c2cdb0073aeeacaf3e25d805c358f1f20cdfbe43745be504deb79e7e3b924051 Feb 03 07:26:55 crc kubenswrapper[4708]: I0203 07:26:55.986329 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"8484d145-abd4-4112-b81c-338bf4d9285f","Type":"ContainerStarted","Data":"6a8243f769095fd7acfb0f51c3e2d9c5459e46a1aaaf9d771cdb635683ea51de"} Feb 03 07:26:55 crc kubenswrapper[4708]: I0203 07:26:55.990934 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-8pqzz" event={"ID":"95221c6e-0d7b-4961-8d71-15134431bac0","Type":"ContainerDied","Data":"f6460754c0310fb49e2fb621d93fcae14bada468e55c9f2581d2c210dc4acb5c"} Feb 03 07:26:55 crc kubenswrapper[4708]: I0203 07:26:55.990950 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-8pqzz" Feb 03 07:26:55 crc kubenswrapper[4708]: I0203 07:26:55.991148 4708 scope.go:117] "RemoveContainer" containerID="a6fc7ab0a9b72591bc628a0dd921b56e9943d50243299060aee4967d22aab7fe" Feb 03 07:26:55 crc kubenswrapper[4708]: I0203 07:26:55.992389 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"d1a2b7e5-23d7-48f6-b144-d575da1e613d","Type":"ContainerStarted","Data":"41b85ddd53d03397eebf89f5e2effcca802297fb8519f187679d74c6afa5af83"} Feb 03 07:26:55 crc kubenswrapper[4708]: I0203 07:26:55.994204 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-zljlj" event={"ID":"fdec39a4-6222-4122-901f-4a6603afc348","Type":"ContainerStarted","Data":"aadd6bf8b71b75793e57365b77d3240a13a8fc3aa2124a10a66c1e6c26111c20"} Feb 03 07:26:55 crc kubenswrapper[4708]: I0203 07:26:55.995182 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-44cl7" event={"ID":"2acc0e69-490c-4b5c-8486-bf0fd3fb6316","Type":"ContainerStarted","Data":"c2cdb0073aeeacaf3e25d805c358f1f20cdfbe43745be504deb79e7e3b924051"} Feb 03 07:26:55 crc kubenswrapper[4708]: I0203 07:26:55.996441 4708 generic.go:334] "Generic (PLEG): container finished" podID="412c823d-20ba-42cb-9c05-70bee2ee89a2" containerID="0e2b31aff1ea9a650da4a40bf6a2a234e0252278e569ade611e9b231f596a8cd" exitCode=0 Feb 03 07:26:55 crc kubenswrapper[4708]: I0203 07:26:55.996573 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-85cjv" event={"ID":"412c823d-20ba-42cb-9c05-70bee2ee89a2","Type":"ContainerDied","Data":"0e2b31aff1ea9a650da4a40bf6a2a234e0252278e569ade611e9b231f596a8cd"} Feb 03 07:26:55 crc kubenswrapper[4708]: I0203 07:26:55.996664 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-85cjv" event={"ID":"412c823d-20ba-42cb-9c05-70bee2ee89a2","Type":"ContainerStarted","Data":"3ed74be5574684387540bf6c9e2a8f152e859d2bef3cbcfcc6f4e6a4bde1a517"} Feb 03 07:26:56 crc kubenswrapper[4708]: I0203 07:26:56.002467 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"5f6fa285-4374-4be5-b4cf-e3dd8ef56762","Type":"ContainerStarted","Data":"b777ca406e2ec704a2f002b92486ec23664885a7b54c4c0d76eaa4a71b00afa8"} Feb 03 07:26:56 crc kubenswrapper[4708]: I0203 07:26:56.008346 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=47.199035259 podStartE2EDuration="59.008330177s" podCreationTimestamp="2026-02-03 07:25:57 +0000 UTC" firstStartedPulling="2026-02-03 07:26:13.919551165 +0000 UTC m=+952.901497972" lastFinishedPulling="2026-02-03 07:26:25.728846083 +0000 UTC m=+964.710792890" observedRunningTime="2026-02-03 07:26:56.005007986 +0000 UTC m=+994.986954783" watchObservedRunningTime="2026-02-03 07:26:56.008330177 +0000 UTC m=+994.990276984" Feb 03 07:26:56 crc kubenswrapper[4708]: I0203 07:26:56.018039 4708 scope.go:117] "RemoveContainer" containerID="14994cf9a023cb7781145284f56231857e24519c82dbca7f71d87cdd36217c24" Feb 03 07:26:56 crc kubenswrapper[4708]: I0203 07:26:56.091658 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-zljlj" podStartSLOduration=7.094246607 podStartE2EDuration="13.091640187s" podCreationTimestamp="2026-02-03 07:26:43 +0000 UTC" firstStartedPulling="2026-02-03 07:26:49.374218012 +0000 UTC m=+988.356164819" 
lastFinishedPulling="2026-02-03 07:26:55.371611592 +0000 UTC m=+994.353558399" observedRunningTime="2026-02-03 07:26:56.064959734 +0000 UTC m=+995.046906561" watchObservedRunningTime="2026-02-03 07:26:56.091640187 +0000 UTC m=+995.073586994" Feb 03 07:26:56 crc kubenswrapper[4708]: I0203 07:26:56.094364 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=46.334747613 podStartE2EDuration="1m0.094355373s" podCreationTimestamp="2026-02-03 07:25:56 +0000 UTC" firstStartedPulling="2026-02-03 07:26:14.458116446 +0000 UTC m=+953.440063253" lastFinishedPulling="2026-02-03 07:26:28.217724196 +0000 UTC m=+967.199671013" observedRunningTime="2026-02-03 07:26:56.087479455 +0000 UTC m=+995.069426262" watchObservedRunningTime="2026-02-03 07:26:56.094355373 +0000 UTC m=+995.076302180" Feb 03 07:26:56 crc kubenswrapper[4708]: I0203 07:26:56.109089 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4hkfj" Feb 03 07:26:56 crc kubenswrapper[4708]: I0203 07:26:56.140389 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-8pqzz"] Feb 03 07:26:56 crc kubenswrapper[4708]: I0203 07:26:56.158395 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-8pqzz"] Feb 03 07:26:56 crc kubenswrapper[4708]: I0203 07:26:56.173547 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4hkfj"] Feb 03 07:26:57 crc kubenswrapper[4708]: I0203 07:26:57.012467 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-44cl7" event={"ID":"2acc0e69-490c-4b5c-8486-bf0fd3fb6316","Type":"ContainerStarted","Data":"3d440a5ad42d5d83411234529faa52073674649de2f04b320582d28644d4f79d"} Feb 03 07:26:57 crc kubenswrapper[4708]: I0203 07:26:57.015222 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-85cjv" event={"ID":"412c823d-20ba-42cb-9c05-70bee2ee89a2","Type":"ContainerStarted","Data":"59d478c2fd90f7840a37dec3167cce1573203aa6de1841bd844c5d928490f0c2"} Feb 03 07:26:57 crc kubenswrapper[4708]: I0203 07:26:57.015401 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-85cjv" Feb 03 07:26:57 crc kubenswrapper[4708]: I0203 07:26:57.030150 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-44cl7" podStartSLOduration=6.03013678 podStartE2EDuration="6.03013678s" podCreationTimestamp="2026-02-03 07:26:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:26:57.028697435 +0000 UTC m=+996.010644252" watchObservedRunningTime="2026-02-03 07:26:57.03013678 +0000 UTC m=+996.012083577" Feb 03 07:26:57 crc kubenswrapper[4708]: I0203 07:26:57.179167 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-v89mq" Feb 03 07:26:57 crc kubenswrapper[4708]: I0203 07:26:57.179221 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-v89mq" Feb 03 07:26:57 crc kubenswrapper[4708]: I0203 07:26:57.227976 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-v89mq" Feb 03 07:26:57 crc kubenswrapper[4708]: I0203 07:26:57.249919 
4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698758b865-85cjv" podStartSLOduration=6.24990206 podStartE2EDuration="6.24990206s" podCreationTimestamp="2026-02-03 07:26:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:26:57.071588045 +0000 UTC m=+996.053534842" watchObservedRunningTime="2026-02-03 07:26:57.24990206 +0000 UTC m=+996.231848867" Feb 03 07:26:57 crc kubenswrapper[4708]: I0203 07:26:57.791877 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Feb 03 07:26:57 crc kubenswrapper[4708]: I0203 07:26:57.791916 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Feb 03 07:26:58 crc kubenswrapper[4708]: I0203 07:26:58.028582 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"d1a2b7e5-23d7-48f6-b144-d575da1e613d","Type":"ContainerStarted","Data":"ce8ccc7ddb8a64843ebab0fef22c53603beeac1fd29131a8a670e8312c03af5b"} Feb 03 07:26:58 crc kubenswrapper[4708]: I0203 07:26:58.029564 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"d1a2b7e5-23d7-48f6-b144-d575da1e613d","Type":"ContainerStarted","Data":"8d6c5fb3464a9e8822e8f7a3366303d9fd12a27ecd06457b96e4b700d86534d6"} Feb 03 07:26:58 crc kubenswrapper[4708]: I0203 07:26:58.029001 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-4hkfj" podUID="69f5f6ed-0270-407f-9b23-68c954638cb1" containerName="registry-server" containerID="cri-o://c3ef8d5a550fbd82535c75860d478d404227eeb4d6195519488ff17b704c67b3" gracePeriod=2 Feb 03 07:26:58 crc kubenswrapper[4708]: I0203 07:26:58.029974 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Feb 03 07:26:58 crc kubenswrapper[4708]: I0203 07:26:58.104339 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95221c6e-0d7b-4961-8d71-15134431bac0" path="/var/lib/kubelet/pods/95221c6e-0d7b-4961-8d71-15134431bac0/volumes" Feb 03 07:26:58 crc kubenswrapper[4708]: I0203 07:26:58.109505 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-v89mq" Feb 03 07:26:58 crc kubenswrapper[4708]: I0203 07:26:58.129742 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=5.680688086 podStartE2EDuration="7.129721146s" podCreationTimestamp="2026-02-03 07:26:51 +0000 UTC" firstStartedPulling="2026-02-03 07:26:55.626526652 +0000 UTC m=+994.608473479" lastFinishedPulling="2026-02-03 07:26:57.075559722 +0000 UTC m=+996.057506539" observedRunningTime="2026-02-03 07:26:58.053845419 +0000 UTC m=+997.035792256" watchObservedRunningTime="2026-02-03 07:26:58.129721146 +0000 UTC m=+997.111667953" Feb 03 07:26:58 crc kubenswrapper[4708]: I0203 07:26:58.376639 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a0593ff7-ba15-46be-8879-70dc42f3beb2-etc-swift\") pod \"swift-storage-0\" (UID: \"a0593ff7-ba15-46be-8879-70dc42f3beb2\") " pod="openstack/swift-storage-0" Feb 03 07:26:58 crc kubenswrapper[4708]: E0203 07:26:58.376897 4708 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 03 
Feb 03 07:26:58 crc kubenswrapper[4708]: E0203 07:26:58.376999 4708 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a0593ff7-ba15-46be-8879-70dc42f3beb2-etc-swift podName:a0593ff7-ba15-46be-8879-70dc42f3beb2 nodeName:}" failed. No retries permitted until 2026-02-03 07:27:14.376978609 +0000 UTC m=+1013.358925426 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a0593ff7-ba15-46be-8879-70dc42f3beb2-etc-swift") pod "swift-storage-0" (UID: "a0593ff7-ba15-46be-8879-70dc42f3beb2") : configmap "swift-ring-files" not found
Feb 03 07:26:58 crc kubenswrapper[4708]: I0203 07:26:58.534728 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-v89mq"]
Feb 03 07:26:58 crc kubenswrapper[4708]: I0203 07:26:58.572133 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4hkfj"
Feb 03 07:26:58 crc kubenswrapper[4708]: I0203 07:26:58.680737 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69f5f6ed-0270-407f-9b23-68c954638cb1-catalog-content\") pod \"69f5f6ed-0270-407f-9b23-68c954638cb1\" (UID: \"69f5f6ed-0270-407f-9b23-68c954638cb1\") "
Feb 03 07:26:58 crc kubenswrapper[4708]: I0203 07:26:58.681157 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69f5f6ed-0270-407f-9b23-68c954638cb1-utilities\") pod \"69f5f6ed-0270-407f-9b23-68c954638cb1\" (UID: \"69f5f6ed-0270-407f-9b23-68c954638cb1\") "
Feb 03 07:26:58 crc kubenswrapper[4708]: I0203 07:26:58.681201 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bd2bn\" (UniqueName: \"kubernetes.io/projected/69f5f6ed-0270-407f-9b23-68c954638cb1-kube-api-access-bd2bn\") pod \"69f5f6ed-0270-407f-9b23-68c954638cb1\" (UID: \"69f5f6ed-0270-407f-9b23-68c954638cb1\") "
Feb 03 07:26:58 crc kubenswrapper[4708]: I0203 07:26:58.682711 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69f5f6ed-0270-407f-9b23-68c954638cb1-utilities" (OuterVolumeSpecName: "utilities") pod "69f5f6ed-0270-407f-9b23-68c954638cb1" (UID: "69f5f6ed-0270-407f-9b23-68c954638cb1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 07:26:58 crc kubenswrapper[4708]: I0203 07:26:58.686018 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-pxgzt"
Feb 03 07:26:58 crc kubenswrapper[4708]: I0203 07:26:58.686060 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-pxgzt"
Feb 03 07:26:58 crc kubenswrapper[4708]: I0203 07:26:58.686699 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69f5f6ed-0270-407f-9b23-68c954638cb1-kube-api-access-bd2bn" (OuterVolumeSpecName: "kube-api-access-bd2bn") pod "69f5f6ed-0270-407f-9b23-68c954638cb1" (UID: "69f5f6ed-0270-407f-9b23-68c954638cb1"). InnerVolumeSpecName "kube-api-access-bd2bn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 07:26:58 crc kubenswrapper[4708]: I0203 07:26:58.731057 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69f5f6ed-0270-407f-9b23-68c954638cb1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "69f5f6ed-0270-407f-9b23-68c954638cb1" (UID: "69f5f6ed-0270-407f-9b23-68c954638cb1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 07:26:58 crc kubenswrapper[4708]: I0203 07:26:58.734656 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-pxgzt"
Feb 03 07:26:58 crc kubenswrapper[4708]: I0203 07:26:58.783436 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bd2bn\" (UniqueName: \"kubernetes.io/projected/69f5f6ed-0270-407f-9b23-68c954638cb1-kube-api-access-bd2bn\") on node \"crc\" DevicePath \"\""
Feb 03 07:26:58 crc kubenswrapper[4708]: I0203 07:26:58.783469 4708 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69f5f6ed-0270-407f-9b23-68c954638cb1-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 03 07:26:58 crc kubenswrapper[4708]: I0203 07:26:58.783479 4708 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69f5f6ed-0270-407f-9b23-68c954638cb1-utilities\") on node \"crc\" DevicePath \"\""
Feb 03 07:26:59 crc kubenswrapper[4708]: I0203 07:26:59.050927 4708 generic.go:334] "Generic (PLEG): container finished" podID="69f5f6ed-0270-407f-9b23-68c954638cb1" containerID="c3ef8d5a550fbd82535c75860d478d404227eeb4d6195519488ff17b704c67b3" exitCode=0
Feb 03 07:26:59 crc kubenswrapper[4708]: I0203 07:26:59.051070 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4hkfj" event={"ID":"69f5f6ed-0270-407f-9b23-68c954638cb1","Type":"ContainerDied","Data":"c3ef8d5a550fbd82535c75860d478d404227eeb4d6195519488ff17b704c67b3"}
Feb 03 07:26:59 crc kubenswrapper[4708]: I0203 07:26:59.051171 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4hkfj" event={"ID":"69f5f6ed-0270-407f-9b23-68c954638cb1","Type":"ContainerDied","Data":"7fc80a95814304ba06eff0539fda05d80ddf3d74ef8e1f6d12bfa75b6e9f8cba"}
Feb 03 07:26:59 crc kubenswrapper[4708]: I0203 07:26:59.051205 4708 scope.go:117] "RemoveContainer" containerID="c3ef8d5a550fbd82535c75860d478d404227eeb4d6195519488ff17b704c67b3"
Feb 03 07:26:59 crc kubenswrapper[4708]: I0203 07:26:59.052372 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4hkfj"
Need to start a new one" pod="openshift-marketplace/community-operators-4hkfj" Feb 03 07:26:59 crc kubenswrapper[4708]: I0203 07:26:59.077847 4708 scope.go:117] "RemoveContainer" containerID="6784448ded9bf36fc7cd04ba61d2af8056e34fddbd2c4822fc5f429fd412d16d" Feb 03 07:26:59 crc kubenswrapper[4708]: I0203 07:26:59.095981 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4hkfj"] Feb 03 07:26:59 crc kubenswrapper[4708]: I0203 07:26:59.104820 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-4hkfj"] Feb 03 07:26:59 crc kubenswrapper[4708]: I0203 07:26:59.113165 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-pxgzt" Feb 03 07:26:59 crc kubenswrapper[4708]: I0203 07:26:59.117496 4708 scope.go:117] "RemoveContainer" containerID="bb786ba9c4b1a2f9bfade13bf99f74a3868790a023c942d4a5d39d7a22611fde" Feb 03 07:26:59 crc kubenswrapper[4708]: I0203 07:26:59.162836 4708 scope.go:117] "RemoveContainer" containerID="c3ef8d5a550fbd82535c75860d478d404227eeb4d6195519488ff17b704c67b3" Feb 03 07:26:59 crc kubenswrapper[4708]: E0203 07:26:59.163215 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3ef8d5a550fbd82535c75860d478d404227eeb4d6195519488ff17b704c67b3\": container with ID starting with c3ef8d5a550fbd82535c75860d478d404227eeb4d6195519488ff17b704c67b3 not found: ID does not exist" containerID="c3ef8d5a550fbd82535c75860d478d404227eeb4d6195519488ff17b704c67b3" Feb 03 07:26:59 crc kubenswrapper[4708]: I0203 07:26:59.163249 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3ef8d5a550fbd82535c75860d478d404227eeb4d6195519488ff17b704c67b3"} err="failed to get container status \"c3ef8d5a550fbd82535c75860d478d404227eeb4d6195519488ff17b704c67b3\": rpc error: code = NotFound desc = could not find container \"c3ef8d5a550fbd82535c75860d478d404227eeb4d6195519488ff17b704c67b3\": container with ID starting with c3ef8d5a550fbd82535c75860d478d404227eeb4d6195519488ff17b704c67b3 not found: ID does not exist" Feb 03 07:26:59 crc kubenswrapper[4708]: I0203 07:26:59.163271 4708 scope.go:117] "RemoveContainer" containerID="6784448ded9bf36fc7cd04ba61d2af8056e34fddbd2c4822fc5f429fd412d16d" Feb 03 07:26:59 crc kubenswrapper[4708]: E0203 07:26:59.166664 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6784448ded9bf36fc7cd04ba61d2af8056e34fddbd2c4822fc5f429fd412d16d\": container with ID starting with 6784448ded9bf36fc7cd04ba61d2af8056e34fddbd2c4822fc5f429fd412d16d not found: ID does not exist" containerID="6784448ded9bf36fc7cd04ba61d2af8056e34fddbd2c4822fc5f429fd412d16d" Feb 03 07:26:59 crc kubenswrapper[4708]: I0203 07:26:59.166699 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6784448ded9bf36fc7cd04ba61d2af8056e34fddbd2c4822fc5f429fd412d16d"} err="failed to get container status \"6784448ded9bf36fc7cd04ba61d2af8056e34fddbd2c4822fc5f429fd412d16d\": rpc error: code = NotFound desc = could not find container \"6784448ded9bf36fc7cd04ba61d2af8056e34fddbd2c4822fc5f429fd412d16d\": container with ID starting with 6784448ded9bf36fc7cd04ba61d2af8056e34fddbd2c4822fc5f429fd412d16d not found: ID does not exist" Feb 03 07:26:59 crc kubenswrapper[4708]: I0203 07:26:59.166721 4708 scope.go:117] "RemoveContainer" 
containerID="bb786ba9c4b1a2f9bfade13bf99f74a3868790a023c942d4a5d39d7a22611fde" Feb 03 07:26:59 crc kubenswrapper[4708]: E0203 07:26:59.167106 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb786ba9c4b1a2f9bfade13bf99f74a3868790a023c942d4a5d39d7a22611fde\": container with ID starting with bb786ba9c4b1a2f9bfade13bf99f74a3868790a023c942d4a5d39d7a22611fde not found: ID does not exist" containerID="bb786ba9c4b1a2f9bfade13bf99f74a3868790a023c942d4a5d39d7a22611fde" Feb 03 07:26:59 crc kubenswrapper[4708]: I0203 07:26:59.167280 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb786ba9c4b1a2f9bfade13bf99f74a3868790a023c942d4a5d39d7a22611fde"} err="failed to get container status \"bb786ba9c4b1a2f9bfade13bf99f74a3868790a023c942d4a5d39d7a22611fde\": rpc error: code = NotFound desc = could not find container \"bb786ba9c4b1a2f9bfade13bf99f74a3868790a023c942d4a5d39d7a22611fde\": container with ID starting with bb786ba9c4b1a2f9bfade13bf99f74a3868790a023c942d4a5d39d7a22611fde not found: ID does not exist" Feb 03 07:26:59 crc kubenswrapper[4708]: I0203 07:26:59.286588 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Feb 03 07:26:59 crc kubenswrapper[4708]: I0203 07:26:59.287009 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Feb 03 07:26:59 crc kubenswrapper[4708]: I0203 07:26:59.562191 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-pb4xp" podUID="3b5a2d58-5ebb-4838-a798-bc280fe99951" containerName="ovn-controller" probeResult="failure" output=< Feb 03 07:26:59 crc kubenswrapper[4708]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Feb 03 07:26:59 crc kubenswrapper[4708]: > Feb 03 07:27:00 crc kubenswrapper[4708]: I0203 07:27:00.059539 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-v89mq" podUID="1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40" containerName="registry-server" containerID="cri-o://b715b4746ea1418bf39b68684e3d53da66dc508f02ca3216437f915911c79491" gracePeriod=2 Feb 03 07:27:00 crc kubenswrapper[4708]: I0203 07:27:00.101598 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69f5f6ed-0270-407f-9b23-68c954638cb1" path="/var/lib/kubelet/pods/69f5f6ed-0270-407f-9b23-68c954638cb1/volumes" Feb 03 07:27:00 crc kubenswrapper[4708]: I0203 07:27:00.985658 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Feb 03 07:27:01 crc kubenswrapper[4708]: I0203 07:27:01.044005 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-v89mq" Feb 03 07:27:01 crc kubenswrapper[4708]: I0203 07:27:01.071740 4708 generic.go:334] "Generic (PLEG): container finished" podID="1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40" containerID="b715b4746ea1418bf39b68684e3d53da66dc508f02ca3216437f915911c79491" exitCode=0 Feb 03 07:27:01 crc kubenswrapper[4708]: I0203 07:27:01.071778 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-v89mq" event={"ID":"1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40","Type":"ContainerDied","Data":"b715b4746ea1418bf39b68684e3d53da66dc508f02ca3216437f915911c79491"} Feb 03 07:27:01 crc kubenswrapper[4708]: I0203 07:27:01.071821 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-v89mq" event={"ID":"1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40","Type":"ContainerDied","Data":"41e42fb54a8c7af5e4d9ba64c36cf346dda48ec943ac2fd1c592ba1563e5995d"} Feb 03 07:27:01 crc kubenswrapper[4708]: I0203 07:27:01.071840 4708 scope.go:117] "RemoveContainer" containerID="b715b4746ea1418bf39b68684e3d53da66dc508f02ca3216437f915911c79491" Feb 03 07:27:01 crc kubenswrapper[4708]: I0203 07:27:01.071924 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-v89mq" Feb 03 07:27:01 crc kubenswrapper[4708]: I0203 07:27:01.110417 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Feb 03 07:27:01 crc kubenswrapper[4708]: I0203 07:27:01.122844 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkcv4\" (UniqueName: \"kubernetes.io/projected/1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40-kube-api-access-zkcv4\") pod \"1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40\" (UID: \"1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40\") " Feb 03 07:27:01 crc kubenswrapper[4708]: I0203 07:27:01.122919 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40-utilities\") pod \"1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40\" (UID: \"1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40\") " Feb 03 07:27:01 crc kubenswrapper[4708]: I0203 07:27:01.122988 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40-catalog-content\") pod \"1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40\" (UID: \"1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40\") " Feb 03 07:27:01 crc kubenswrapper[4708]: I0203 07:27:01.124759 4708 scope.go:117] "RemoveContainer" containerID="dc4e36a05b2beef376f9d3dcdb32a3b72cff6c073a4762ae27b8baceb95eb514" Feb 03 07:27:01 crc kubenswrapper[4708]: I0203 07:27:01.125752 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40-utilities" (OuterVolumeSpecName: "utilities") pod "1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40" (UID: "1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:27:01 crc kubenswrapper[4708]: I0203 07:27:01.131587 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40-kube-api-access-zkcv4" (OuterVolumeSpecName: "kube-api-access-zkcv4") pod "1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40" (UID: "1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40"). InnerVolumeSpecName "kube-api-access-zkcv4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:27:01 crc kubenswrapper[4708]: I0203 07:27:01.150447 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40" (UID: "1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:27:01 crc kubenswrapper[4708]: I0203 07:27:01.218949 4708 scope.go:117] "RemoveContainer" containerID="5ccd4769c35303f024bd723654bbe37fad1894226b766ad0632411c4ac240c64" Feb 03 07:27:01 crc kubenswrapper[4708]: I0203 07:27:01.226477 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkcv4\" (UniqueName: \"kubernetes.io/projected/1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40-kube-api-access-zkcv4\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:01 crc kubenswrapper[4708]: I0203 07:27:01.226530 4708 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:01 crc kubenswrapper[4708]: I0203 07:27:01.226544 4708 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:01 crc kubenswrapper[4708]: I0203 07:27:01.247208 4708 scope.go:117] "RemoveContainer" containerID="b715b4746ea1418bf39b68684e3d53da66dc508f02ca3216437f915911c79491" Feb 03 07:27:01 crc kubenswrapper[4708]: E0203 07:27:01.247769 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b715b4746ea1418bf39b68684e3d53da66dc508f02ca3216437f915911c79491\": container with ID starting with b715b4746ea1418bf39b68684e3d53da66dc508f02ca3216437f915911c79491 not found: ID does not exist" containerID="b715b4746ea1418bf39b68684e3d53da66dc508f02ca3216437f915911c79491" Feb 03 07:27:01 crc kubenswrapper[4708]: I0203 07:27:01.247837 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b715b4746ea1418bf39b68684e3d53da66dc508f02ca3216437f915911c79491"} err="failed to get container status \"b715b4746ea1418bf39b68684e3d53da66dc508f02ca3216437f915911c79491\": rpc error: code = NotFound desc = could not find container \"b715b4746ea1418bf39b68684e3d53da66dc508f02ca3216437f915911c79491\": container with ID starting with b715b4746ea1418bf39b68684e3d53da66dc508f02ca3216437f915911c79491 not found: ID does not exist" Feb 03 07:27:01 crc kubenswrapper[4708]: I0203 07:27:01.247871 4708 scope.go:117] "RemoveContainer" containerID="dc4e36a05b2beef376f9d3dcdb32a3b72cff6c073a4762ae27b8baceb95eb514" Feb 03 07:27:01 crc kubenswrapper[4708]: E0203 07:27:01.248642 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find 
container \"dc4e36a05b2beef376f9d3dcdb32a3b72cff6c073a4762ae27b8baceb95eb514\": container with ID starting with dc4e36a05b2beef376f9d3dcdb32a3b72cff6c073a4762ae27b8baceb95eb514 not found: ID does not exist" containerID="dc4e36a05b2beef376f9d3dcdb32a3b72cff6c073a4762ae27b8baceb95eb514" Feb 03 07:27:01 crc kubenswrapper[4708]: I0203 07:27:01.248683 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc4e36a05b2beef376f9d3dcdb32a3b72cff6c073a4762ae27b8baceb95eb514"} err="failed to get container status \"dc4e36a05b2beef376f9d3dcdb32a3b72cff6c073a4762ae27b8baceb95eb514\": rpc error: code = NotFound desc = could not find container \"dc4e36a05b2beef376f9d3dcdb32a3b72cff6c073a4762ae27b8baceb95eb514\": container with ID starting with dc4e36a05b2beef376f9d3dcdb32a3b72cff6c073a4762ae27b8baceb95eb514 not found: ID does not exist" Feb 03 07:27:01 crc kubenswrapper[4708]: I0203 07:27:01.248709 4708 scope.go:117] "RemoveContainer" containerID="5ccd4769c35303f024bd723654bbe37fad1894226b766ad0632411c4ac240c64" Feb 03 07:27:01 crc kubenswrapper[4708]: E0203 07:27:01.249105 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ccd4769c35303f024bd723654bbe37fad1894226b766ad0632411c4ac240c64\": container with ID starting with 5ccd4769c35303f024bd723654bbe37fad1894226b766ad0632411c4ac240c64 not found: ID does not exist" containerID="5ccd4769c35303f024bd723654bbe37fad1894226b766ad0632411c4ac240c64" Feb 03 07:27:01 crc kubenswrapper[4708]: I0203 07:27:01.249134 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ccd4769c35303f024bd723654bbe37fad1894226b766ad0632411c4ac240c64"} err="failed to get container status \"5ccd4769c35303f024bd723654bbe37fad1894226b766ad0632411c4ac240c64\": rpc error: code = NotFound desc = could not find container \"5ccd4769c35303f024bd723654bbe37fad1894226b766ad0632411c4ac240c64\": container with ID starting with 5ccd4769c35303f024bd723654bbe37fad1894226b766ad0632411c4ac240c64 not found: ID does not exist" Feb 03 07:27:01 crc kubenswrapper[4708]: I0203 07:27:01.402410 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-v89mq"] Feb 03 07:27:01 crc kubenswrapper[4708]: I0203 07:27:01.408373 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-v89mq"] Feb 03 07:27:01 crc kubenswrapper[4708]: I0203 07:27:01.755968 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-85cjv" Feb 03 07:27:01 crc kubenswrapper[4708]: I0203 07:27:01.828298 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-zwvx2"] Feb 03 07:27:01 crc kubenswrapper[4708]: I0203 07:27:01.828583 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-zwvx2" podUID="193dd272-5329-401f-a02c-0910a3d98246" containerName="dnsmasq-dns" containerID="cri-o://ac4da0274520dc56183b8454cca97c4326ecceaaeca78b26a1e9deae45c58aa8" gracePeriod=10 Feb 03 07:27:02 crc kubenswrapper[4708]: I0203 07:27:02.086913 4708 generic.go:334] "Generic (PLEG): container finished" podID="2572b4b8-5df3-4d81-9bd7-8ef427c6d945" containerID="2beb5c3e89e59e9e8aba167e3cad443c61c86c2d6b1a629e4cbf9f24206b0baa" exitCode=0 Feb 03 07:27:02 crc kubenswrapper[4708]: I0203 07:27:02.087366 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2572b4b8-5df3-4d81-9bd7-8ef427c6d945","Type":"ContainerDied","Data":"2beb5c3e89e59e9e8aba167e3cad443c61c86c2d6b1a629e4cbf9f24206b0baa"} Feb 03 07:27:02 crc kubenswrapper[4708]: I0203 07:27:02.100225 4708 generic.go:334] "Generic (PLEG): container finished" podID="193dd272-5329-401f-a02c-0910a3d98246" containerID="ac4da0274520dc56183b8454cca97c4326ecceaaeca78b26a1e9deae45c58aa8" exitCode=0 Feb 03 07:27:02 crc kubenswrapper[4708]: I0203 07:27:02.103325 4708 generic.go:334] "Generic (PLEG): container finished" podID="0edbabdf-99f1-49b3-83ee-48ad17467638" containerID="b1e2311a7605dd6875da84dc45c7f866ac255e7770361e3a9016bff4ae16a8aa" exitCode=0 Feb 03 07:27:02 crc kubenswrapper[4708]: I0203 07:27:02.105927 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40" path="/var/lib/kubelet/pods/1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40/volumes" Feb 03 07:27:02 crc kubenswrapper[4708]: I0203 07:27:02.106874 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-ppsk2" Feb 03 07:27:02 crc kubenswrapper[4708]: I0203 07:27:02.106907 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-zwvx2" event={"ID":"193dd272-5329-401f-a02c-0910a3d98246","Type":"ContainerDied","Data":"ac4da0274520dc56183b8454cca97c4326ecceaaeca78b26a1e9deae45c58aa8"} Feb 03 07:27:02 crc kubenswrapper[4708]: I0203 07:27:02.106928 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0edbabdf-99f1-49b3-83ee-48ad17467638","Type":"ContainerDied","Data":"b1e2311a7605dd6875da84dc45c7f866ac255e7770361e3a9016bff4ae16a8aa"} Feb 03 07:27:02 crc kubenswrapper[4708]: I0203 07:27:02.363379 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-zwvx2" Feb 03 07:27:02 crc kubenswrapper[4708]: I0203 07:27:02.449335 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-skjtt\" (UniqueName: \"kubernetes.io/projected/193dd272-5329-401f-a02c-0910a3d98246-kube-api-access-skjtt\") pod \"193dd272-5329-401f-a02c-0910a3d98246\" (UID: \"193dd272-5329-401f-a02c-0910a3d98246\") " Feb 03 07:27:02 crc kubenswrapper[4708]: I0203 07:27:02.449378 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/193dd272-5329-401f-a02c-0910a3d98246-dns-svc\") pod \"193dd272-5329-401f-a02c-0910a3d98246\" (UID: \"193dd272-5329-401f-a02c-0910a3d98246\") " Feb 03 07:27:02 crc kubenswrapper[4708]: I0203 07:27:02.449527 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/193dd272-5329-401f-a02c-0910a3d98246-config\") pod \"193dd272-5329-401f-a02c-0910a3d98246\" (UID: \"193dd272-5329-401f-a02c-0910a3d98246\") " Feb 03 07:27:02 crc kubenswrapper[4708]: I0203 07:27:02.459631 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/193dd272-5329-401f-a02c-0910a3d98246-kube-api-access-skjtt" (OuterVolumeSpecName: "kube-api-access-skjtt") pod "193dd272-5329-401f-a02c-0910a3d98246" (UID: "193dd272-5329-401f-a02c-0910a3d98246"). InnerVolumeSpecName "kube-api-access-skjtt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:27:02 crc kubenswrapper[4708]: I0203 07:27:02.501685 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/193dd272-5329-401f-a02c-0910a3d98246-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "193dd272-5329-401f-a02c-0910a3d98246" (UID: "193dd272-5329-401f-a02c-0910a3d98246"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:02 crc kubenswrapper[4708]: I0203 07:27:02.503609 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/193dd272-5329-401f-a02c-0910a3d98246-config" (OuterVolumeSpecName: "config") pod "193dd272-5329-401f-a02c-0910a3d98246" (UID: "193dd272-5329-401f-a02c-0910a3d98246"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:02 crc kubenswrapper[4708]: I0203 07:27:02.551475 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-skjtt\" (UniqueName: \"kubernetes.io/projected/193dd272-5329-401f-a02c-0910a3d98246-kube-api-access-skjtt\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:02 crc kubenswrapper[4708]: I0203 07:27:02.551503 4708 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/193dd272-5329-401f-a02c-0910a3d98246-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:02 crc kubenswrapper[4708]: I0203 07:27:02.551512 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/193dd272-5329-401f-a02c-0910a3d98246-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:02 crc kubenswrapper[4708]: I0203 07:27:02.938401 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pxgzt"] Feb 03 07:27:02 crc kubenswrapper[4708]: I0203 07:27:02.938693 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-pxgzt" podUID="271e64f1-20a8-42ad-962a-0b498d561cdd" containerName="registry-server" containerID="cri-o://cb392ead4fab52dfc293be5e8f9dcca074189cc653525be55e5484146a35e2e6" gracePeriod=2 Feb 03 07:27:03 crc kubenswrapper[4708]: I0203 07:27:03.114265 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-zwvx2" Feb 03 07:27:03 crc kubenswrapper[4708]: I0203 07:27:03.114253 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-zwvx2" event={"ID":"193dd272-5329-401f-a02c-0910a3d98246","Type":"ContainerDied","Data":"7540af77898882824276bd81d425eba4b44f42f1865e23b7fff09424b4c51a15"} Feb 03 07:27:03 crc kubenswrapper[4708]: I0203 07:27:03.114400 4708 scope.go:117] "RemoveContainer" containerID="ac4da0274520dc56183b8454cca97c4326ecceaaeca78b26a1e9deae45c58aa8" Feb 03 07:27:03 crc kubenswrapper[4708]: I0203 07:27:03.117513 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0edbabdf-99f1-49b3-83ee-48ad17467638","Type":"ContainerStarted","Data":"7e59f79907e1d5dd19cc411977e7fe1121123bdbd223e84f0d86533ed18de870"} Feb 03 07:27:03 crc kubenswrapper[4708]: I0203 07:27:03.117986 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Feb 03 07:27:03 crc kubenswrapper[4708]: I0203 07:27:03.123699 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2572b4b8-5df3-4d81-9bd7-8ef427c6d945","Type":"ContainerStarted","Data":"b5499106701aba8c5e5b96ad9f798143bd1ca1dbb0a5be7b4f68bf4634038c1e"} Feb 03 07:27:03 crc kubenswrapper[4708]: I0203 07:27:03.123932 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:27:03 crc kubenswrapper[4708]: I0203 07:27:03.126424 4708 generic.go:334] "Generic (PLEG): container finished" podID="271e64f1-20a8-42ad-962a-0b498d561cdd" containerID="cb392ead4fab52dfc293be5e8f9dcca074189cc653525be55e5484146a35e2e6" exitCode=0 Feb 03 07:27:03 crc kubenswrapper[4708]: I0203 07:27:03.126460 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pxgzt" event={"ID":"271e64f1-20a8-42ad-962a-0b498d561cdd","Type":"ContainerDied","Data":"cb392ead4fab52dfc293be5e8f9dcca074189cc653525be55e5484146a35e2e6"} Feb 03 07:27:03 crc kubenswrapper[4708]: I0203 07:27:03.151067 4708 scope.go:117] "RemoveContainer" containerID="e80ac35c6994a560cc7047837a599d2d417780a770391ffe660af4d71213783d" Feb 03 07:27:03 crc kubenswrapper[4708]: I0203 07:27:03.151298 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=54.861744357 podStartE2EDuration="1m9.151277769s" podCreationTimestamp="2026-02-03 07:25:54 +0000 UTC" firstStartedPulling="2026-02-03 07:26:13.928117182 +0000 UTC m=+952.910063989" lastFinishedPulling="2026-02-03 07:26:28.217650594 +0000 UTC m=+967.199597401" observedRunningTime="2026-02-03 07:27:03.149399043 +0000 UTC m=+1002.131345860" watchObservedRunningTime="2026-02-03 07:27:03.151277769 +0000 UTC m=+1002.133224576" Feb 03 07:27:03 crc kubenswrapper[4708]: I0203 07:27:03.186792 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=56.22614644 podStartE2EDuration="1m8.186772337s" podCreationTimestamp="2026-02-03 07:25:55 +0000 UTC" firstStartedPulling="2026-02-03 07:26:12.868276619 +0000 UTC m=+951.850223466" lastFinishedPulling="2026-02-03 07:26:24.828902546 +0000 UTC m=+963.810849363" observedRunningTime="2026-02-03 07:27:03.172870507 +0000 UTC m=+1002.154817344" watchObservedRunningTime="2026-02-03 07:27:03.186772337 +0000 UTC m=+1002.168719144" Feb 03 07:27:03 crc kubenswrapper[4708]: I0203 
Feb 03 07:27:03 crc kubenswrapper[4708]: I0203 07:27:03.205368 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-zwvx2"]
Feb 03 07:27:03 crc kubenswrapper[4708]: I0203 07:27:03.371730 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0"
Feb 03 07:27:03 crc kubenswrapper[4708]: I0203 07:27:03.452317 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0"
Feb 03 07:27:03 crc kubenswrapper[4708]: I0203 07:27:03.544923 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pxgzt"
Feb 03 07:27:03 crc kubenswrapper[4708]: I0203 07:27:03.673358 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/271e64f1-20a8-42ad-962a-0b498d561cdd-utilities\") pod \"271e64f1-20a8-42ad-962a-0b498d561cdd\" (UID: \"271e64f1-20a8-42ad-962a-0b498d561cdd\") "
Feb 03 07:27:03 crc kubenswrapper[4708]: I0203 07:27:03.673406 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/271e64f1-20a8-42ad-962a-0b498d561cdd-catalog-content\") pod \"271e64f1-20a8-42ad-962a-0b498d561cdd\" (UID: \"271e64f1-20a8-42ad-962a-0b498d561cdd\") "
Feb 03 07:27:03 crc kubenswrapper[4708]: I0203 07:27:03.673436 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r8bf4\" (UniqueName: \"kubernetes.io/projected/271e64f1-20a8-42ad-962a-0b498d561cdd-kube-api-access-r8bf4\") pod \"271e64f1-20a8-42ad-962a-0b498d561cdd\" (UID: \"271e64f1-20a8-42ad-962a-0b498d561cdd\") "
Feb 03 07:27:03 crc kubenswrapper[4708]: I0203 07:27:03.674316 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/271e64f1-20a8-42ad-962a-0b498d561cdd-utilities" (OuterVolumeSpecName: "utilities") pod "271e64f1-20a8-42ad-962a-0b498d561cdd" (UID: "271e64f1-20a8-42ad-962a-0b498d561cdd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 07:27:03 crc kubenswrapper[4708]: I0203 07:27:03.694510 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/271e64f1-20a8-42ad-962a-0b498d561cdd-kube-api-access-r8bf4" (OuterVolumeSpecName: "kube-api-access-r8bf4") pod "271e64f1-20a8-42ad-962a-0b498d561cdd" (UID: "271e64f1-20a8-42ad-962a-0b498d561cdd"). InnerVolumeSpecName "kube-api-access-r8bf4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 07:27:03 crc kubenswrapper[4708]: I0203 07:27:03.775257 4708 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/271e64f1-20a8-42ad-962a-0b498d561cdd-utilities\") on node \"crc\" DevicePath \"\""
Feb 03 07:27:03 crc kubenswrapper[4708]: I0203 07:27:03.775337 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r8bf4\" (UniqueName: \"kubernetes.io/projected/271e64f1-20a8-42ad-962a-0b498d561cdd-kube-api-access-r8bf4\") on node \"crc\" DevicePath \"\""
Feb 03 07:27:03 crc kubenswrapper[4708]: I0203 07:27:03.807371 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/271e64f1-20a8-42ad-962a-0b498d561cdd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "271e64f1-20a8-42ad-962a-0b498d561cdd" (UID: "271e64f1-20a8-42ad-962a-0b498d561cdd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 07:27:03 crc kubenswrapper[4708]: I0203 07:27:03.877496 4708 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/271e64f1-20a8-42ad-962a-0b498d561cdd-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.107883 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="193dd272-5329-401f-a02c-0910a3d98246" path="/var/lib/kubelet/pods/193dd272-5329-401f-a02c-0910a3d98246/volumes"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.138108 4708 generic.go:334] "Generic (PLEG): container finished" podID="fdec39a4-6222-4122-901f-4a6603afc348" containerID="aadd6bf8b71b75793e57365b77d3240a13a8fc3aa2124a10a66c1e6c26111c20" exitCode=0
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.138168 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-zljlj" event={"ID":"fdec39a4-6222-4122-901f-4a6603afc348","Type":"ContainerDied","Data":"aadd6bf8b71b75793e57365b77d3240a13a8fc3aa2124a10a66c1e6c26111c20"}
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.142100 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pxgzt" event={"ID":"271e64f1-20a8-42ad-962a-0b498d561cdd","Type":"ContainerDied","Data":"32b4a0a3d807373d61294a63e9ac3f78cbabb3d70be5674661a956eb8df37478"}
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.142262 4708 scope.go:117] "RemoveContainer" containerID="cb392ead4fab52dfc293be5e8f9dcca074189cc653525be55e5484146a35e2e6"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.142161 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pxgzt"
Need to start a new one" pod="openshift-marketplace/redhat-operators-pxgzt"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.176366 4708 scope.go:117] "RemoveContainer" containerID="c024c4ec79c6c814bcff0d6c07250edffa31c916e9dd658de321126f03bc837f"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.182436 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pxgzt"]
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.194829 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-pxgzt"]
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.199532 4708 scope.go:117] "RemoveContainer" containerID="4e31452224840996574cce8f443004fa472717dafc618452ca4d80aeab7c15ad"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.566958 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-pb4xp" podUID="3b5a2d58-5ebb-4838-a798-bc280fe99951" containerName="ovn-controller" probeResult="failure" output=<
Feb 03 07:27:04 crc kubenswrapper[4708]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Feb 03 07:27:04 crc kubenswrapper[4708]: >
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.589585 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-48bcs"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.829979 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-487ld"]
Feb 03 07:27:04 crc kubenswrapper[4708]: E0203 07:27:04.830368 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95221c6e-0d7b-4961-8d71-15134431bac0" containerName="init"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.830386 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="95221c6e-0d7b-4961-8d71-15134431bac0" containerName="init"
Feb 03 07:27:04 crc kubenswrapper[4708]: E0203 07:27:04.830398 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69f5f6ed-0270-407f-9b23-68c954638cb1" containerName="extract-utilities"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.830404 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="69f5f6ed-0270-407f-9b23-68c954638cb1" containerName="extract-utilities"
Feb 03 07:27:04 crc kubenswrapper[4708]: E0203 07:27:04.830417 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="271e64f1-20a8-42ad-962a-0b498d561cdd" containerName="extract-content"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.830425 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="271e64f1-20a8-42ad-962a-0b498d561cdd" containerName="extract-content"
Feb 03 07:27:04 crc kubenswrapper[4708]: E0203 07:27:04.830436 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="193dd272-5329-401f-a02c-0910a3d98246" containerName="init"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.830444 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="193dd272-5329-401f-a02c-0910a3d98246" containerName="init"
Feb 03 07:27:04 crc kubenswrapper[4708]: E0203 07:27:04.830455 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69f5f6ed-0270-407f-9b23-68c954638cb1" containerName="extract-content"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.830460 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="69f5f6ed-0270-407f-9b23-68c954638cb1" containerName="extract-content"
Feb 03 07:27:04 crc kubenswrapper[4708]: E0203 07:27:04.830475 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40" containerName="extract-content"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.830481 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40" containerName="extract-content"
Feb 03 07:27:04 crc kubenswrapper[4708]: E0203 07:27:04.830492 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="193dd272-5329-401f-a02c-0910a3d98246" containerName="dnsmasq-dns"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.830498 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="193dd272-5329-401f-a02c-0910a3d98246" containerName="dnsmasq-dns"
Feb 03 07:27:04 crc kubenswrapper[4708]: E0203 07:27:04.830505 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="271e64f1-20a8-42ad-962a-0b498d561cdd" containerName="registry-server"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.830511 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="271e64f1-20a8-42ad-962a-0b498d561cdd" containerName="registry-server"
Feb 03 07:27:04 crc kubenswrapper[4708]: E0203 07:27:04.830521 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69f5f6ed-0270-407f-9b23-68c954638cb1" containerName="registry-server"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.830528 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="69f5f6ed-0270-407f-9b23-68c954638cb1" containerName="registry-server"
Feb 03 07:27:04 crc kubenswrapper[4708]: E0203 07:27:04.830536 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40" containerName="registry-server"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.830547 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40" containerName="registry-server"
Feb 03 07:27:04 crc kubenswrapper[4708]: E0203 07:27:04.830563 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95221c6e-0d7b-4961-8d71-15134431bac0" containerName="dnsmasq-dns"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.830570 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="95221c6e-0d7b-4961-8d71-15134431bac0" containerName="dnsmasq-dns"
Feb 03 07:27:04 crc kubenswrapper[4708]: E0203 07:27:04.830578 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="271e64f1-20a8-42ad-962a-0b498d561cdd" containerName="extract-utilities"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.830584 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="271e64f1-20a8-42ad-962a-0b498d561cdd" containerName="extract-utilities"
Feb 03 07:27:04 crc kubenswrapper[4708]: E0203 07:27:04.830595 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40" containerName="extract-utilities"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.830600 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40" containerName="extract-utilities"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.830770 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="95221c6e-0d7b-4961-8d71-15134431bac0" containerName="dnsmasq-dns"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.830784 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="69f5f6ed-0270-407f-9b23-68c954638cb1" containerName="registry-server"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.830816 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="271e64f1-20a8-42ad-962a-0b498d561cdd" containerName="registry-server"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.830827 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="193dd272-5329-401f-a02c-0910a3d98246" containerName="dnsmasq-dns"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.830843 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a7c44b1-d3a9-43f3-8574-ecffdeaaaf40" containerName="registry-server"
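The cpu_manager/state_mem/memory_manager pairs above are the kubelet dropping per-container resource reservations for pods that no longer exist. A minimal Go sketch of that cleanup pattern, assuming a simple in-memory assignment map keyed by pod UID and container name; the types and names here are illustrative, not the kubelet's actual ones in pkg/kubelet/cm:

package main

import "fmt"

// containerKey identifies a reservation: which container of which pod.
type containerKey struct {
	podUID        string
	containerName string
}

// resourceState is a stand-in for the checkpointed CPU-set assignments the
// cpu_manager keeps; the memory manager keeps an analogous map.
type resourceState struct {
	assignments map[containerKey]string // e.g. "0-3" for a reserved CPU set
}

// removeStaleState sweeps out every assignment whose pod is no longer
// active, producing the same pairing of messages seen in the log above.
func (s *resourceState) removeStaleState(activePods map[string]bool) {
	for key := range s.assignments {
		if activePods[key.podUID] {
			continue
		}
		fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n",
			key.podUID, key.containerName)
		delete(s.assignments, key)
		fmt.Printf("Deleted CPUSet assignment podUID=%q containerName=%q\n",
			key.podUID, key.containerName)
	}
}

func main() {
	s := &resourceState{assignments: map[containerKey]string{
		{podUID: "95221c6e", containerName: "init"}:            "0-1",
		{podUID: "69f5f6ed", containerName: "registry-server"}: "2-3",
	}}
	s.removeStaleState(map[string]bool{}) // neither pod is active any more
}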
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.831371 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-487ld"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.866925 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-9889-account-create-update-xfq6v"]
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.868186 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-9889-account-create-update-xfq6v"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.870583 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.884881 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-487ld"]
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.891217 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-9889-account-create-update-xfq6v"]
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.892880 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/acb52368-cf93-4c82-926a-665f665ed84a-operator-scripts\") pod \"glance-db-create-487ld\" (UID: \"acb52368-cf93-4c82-926a-665f665ed84a\") " pod="openstack/glance-db-create-487ld"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.892911 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d597s\" (UniqueName: \"kubernetes.io/projected/acb52368-cf93-4c82-926a-665f665ed84a-kube-api-access-d597s\") pod \"glance-db-create-487ld\" (UID: \"acb52368-cf93-4c82-926a-665f665ed84a\") " pod="openstack/glance-db-create-487ld"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.993971 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9glg5\" (UniqueName: \"kubernetes.io/projected/7ecd1be3-40af-41af-bbc6-78a346f02c44-kube-api-access-9glg5\") pod \"glance-9889-account-create-update-xfq6v\" (UID: \"7ecd1be3-40af-41af-bbc6-78a346f02c44\") " pod="openstack/glance-9889-account-create-update-xfq6v"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.994312 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/acb52368-cf93-4c82-926a-665f665ed84a-operator-scripts\") pod \"glance-db-create-487ld\" (UID: \"acb52368-cf93-4c82-926a-665f665ed84a\") " pod="openstack/glance-db-create-487ld"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.994376 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d597s\" (UniqueName: \"kubernetes.io/projected/acb52368-cf93-4c82-926a-665f665ed84a-kube-api-access-d597s\") pod \"glance-db-create-487ld\" (UID: \"acb52368-cf93-4c82-926a-665f665ed84a\") " pod="openstack/glance-db-create-487ld"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.994522 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ecd1be3-40af-41af-bbc6-78a346f02c44-operator-scripts\") pod \"glance-9889-account-create-update-xfq6v\" (UID: \"7ecd1be3-40af-41af-bbc6-78a346f02c44\") " pod="openstack/glance-9889-account-create-update-xfq6v"
Feb 03 07:27:04 crc kubenswrapper[4708]: I0203 07:27:04.995032 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/acb52368-cf93-4c82-926a-665f665ed84a-operator-scripts\") pod \"glance-db-create-487ld\" (UID: \"acb52368-cf93-4c82-926a-665f665ed84a\") " pod="openstack/glance-db-create-487ld"
Feb 03 07:27:05 crc kubenswrapper[4708]: I0203 07:27:05.028633 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d597s\" (UniqueName: \"kubernetes.io/projected/acb52368-cf93-4c82-926a-665f665ed84a-kube-api-access-d597s\") pod \"glance-db-create-487ld\" (UID: \"acb52368-cf93-4c82-926a-665f665ed84a\") " pod="openstack/glance-db-create-487ld"
Feb 03 07:27:05 crc kubenswrapper[4708]: I0203 07:27:05.095551 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9glg5\" (UniqueName: \"kubernetes.io/projected/7ecd1be3-40af-41af-bbc6-78a346f02c44-kube-api-access-9glg5\") pod \"glance-9889-account-create-update-xfq6v\" (UID: \"7ecd1be3-40af-41af-bbc6-78a346f02c44\") " pod="openstack/glance-9889-account-create-update-xfq6v"
Feb 03 07:27:05 crc kubenswrapper[4708]: I0203 07:27:05.095683 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ecd1be3-40af-41af-bbc6-78a346f02c44-operator-scripts\") pod \"glance-9889-account-create-update-xfq6v\" (UID: \"7ecd1be3-40af-41af-bbc6-78a346f02c44\") " pod="openstack/glance-9889-account-create-update-xfq6v"
Feb 03 07:27:05 crc kubenswrapper[4708]: I0203 07:27:05.096524 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ecd1be3-40af-41af-bbc6-78a346f02c44-operator-scripts\") pod \"glance-9889-account-create-update-xfq6v\" (UID: \"7ecd1be3-40af-41af-bbc6-78a346f02c44\") " pod="openstack/glance-9889-account-create-update-xfq6v"
Feb 03 07:27:05 crc kubenswrapper[4708]: I0203 07:27:05.116461 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9glg5\" (UniqueName: \"kubernetes.io/projected/7ecd1be3-40af-41af-bbc6-78a346f02c44-kube-api-access-9glg5\") pod \"glance-9889-account-create-update-xfq6v\" (UID: \"7ecd1be3-40af-41af-bbc6-78a346f02c44\") " pod="openstack/glance-9889-account-create-update-xfq6v"
Feb 03 07:27:05 crc kubenswrapper[4708]: I0203 07:27:05.157160 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-487ld"
Feb 03 07:27:05 crc kubenswrapper[4708]: I0203 07:27:05.180927 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-9889-account-create-update-xfq6v"
Feb 03 07:27:05 crc kubenswrapper[4708]: I0203 07:27:05.337574 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ppsk2"]
Feb 03 07:27:05 crc kubenswrapper[4708]: I0203 07:27:05.338108 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-ppsk2" podUID="0f490b43-b9f5-4e99-89a1-e7c75a45b487" containerName="registry-server" containerID="cri-o://53a21fb701c637c26d897889cbe13537de1e20095607a20d3ae5cad0c880fc30" gracePeriod=2
Feb 03 07:27:05 crc kubenswrapper[4708]: I0203 07:27:05.584231 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-zljlj"
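The gracePeriod=2 kill of certified-operators-ppsk2's registry-server above follows the usual termination sequence: the kubelet asks the runtime to stop the container, and the runtime escalates to a forced kill once the grace period expires. A hedged sketch of that call path, using a stand-in interface for CRI's StopContainer (the real call is a gRPC RuntimeService method):

package main

import (
	"context"
	"fmt"
	"time"
)

// containerStopper stands in for the CRI runtime's StopContainer call; the
// runtime sends the container's stop signal and escalates to a forced kill
// once the timeout (the grace period) elapses.
type containerStopper interface {
	StopContainer(ctx context.Context, containerID string, timeoutSeconds int64) error
}

// killContainer mirrors the "Killing container with a grace period" entry:
// log the intent, then hand the grace period to the runtime, bounding the
// call itself so a wedged runtime cannot stall the sync loop forever.
func killContainer(ctx context.Context, rt containerStopper, pod, name, id string, gracePeriod int64) error {
	fmt.Printf("Killing container with a grace period pod=%q containerName=%q containerID=%q gracePeriod=%d\n",
		pod, name, id, gracePeriod)
	ctx, cancel := context.WithTimeout(ctx, time.Duration(gracePeriod)*time.Second+10*time.Second)
	defer cancel()
	return rt.StopContainer(ctx, id, gracePeriod)
}

// fakeRuntime pretends the container exited within its grace period.
type fakeRuntime struct{}

func (fakeRuntime) StopContainer(_ context.Context, id string, _ int64) error {
	fmt.Printf("runtime stopped %s\n", id)
	return nil
}

func main() {
	_ = killContainer(context.Background(), fakeRuntime{},
		"openshift-marketplace/certified-operators-ppsk2", "registry-server",
		"cri-o://<container-id>", 2)
}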
"fdec39a4-6222-4122-901f-4a6603afc348" (UID: "fdec39a4-6222-4122-901f-4a6603afc348"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:05 crc kubenswrapper[4708]: I0203 07:27:05.717913 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fdec39a4-6222-4122-901f-4a6603afc348-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "fdec39a4-6222-4122-901f-4a6603afc348" (UID: "fdec39a4-6222-4122-901f-4a6603afc348"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:27:05 crc kubenswrapper[4708]: I0203 07:27:05.717978 4708 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/fdec39a4-6222-4122-901f-4a6603afc348-ring-data-devices\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:05 crc kubenswrapper[4708]: I0203 07:27:05.726287 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdec39a4-6222-4122-901f-4a6603afc348-kube-api-access-ndjdn" (OuterVolumeSpecName: "kube-api-access-ndjdn") pod "fdec39a4-6222-4122-901f-4a6603afc348" (UID: "fdec39a4-6222-4122-901f-4a6603afc348"). InnerVolumeSpecName "kube-api-access-ndjdn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:27:05 crc kubenswrapper[4708]: I0203 07:27:05.742912 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-9889-account-create-update-xfq6v"] Feb 03 07:27:05 crc kubenswrapper[4708]: I0203 07:27:05.751508 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fdec39a4-6222-4122-901f-4a6603afc348-scripts" (OuterVolumeSpecName: "scripts") pod "fdec39a4-6222-4122-901f-4a6603afc348" (UID: "fdec39a4-6222-4122-901f-4a6603afc348"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:05 crc kubenswrapper[4708]: I0203 07:27:05.758139 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdec39a4-6222-4122-901f-4a6603afc348-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fdec39a4-6222-4122-901f-4a6603afc348" (UID: "fdec39a4-6222-4122-901f-4a6603afc348"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:27:05 crc kubenswrapper[4708]: I0203 07:27:05.758166 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdec39a4-6222-4122-901f-4a6603afc348-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "fdec39a4-6222-4122-901f-4a6603afc348" (UID: "fdec39a4-6222-4122-901f-4a6603afc348"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:27:05 crc kubenswrapper[4708]: I0203 07:27:05.759637 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdec39a4-6222-4122-901f-4a6603afc348-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "fdec39a4-6222-4122-901f-4a6603afc348" (UID: "fdec39a4-6222-4122-901f-4a6603afc348"). InnerVolumeSpecName "dispersionconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:27:05 crc kubenswrapper[4708]: I0203 07:27:05.762355 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-487ld"] Feb 03 07:27:05 crc kubenswrapper[4708]: W0203 07:27:05.767327 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podacb52368_cf93_4c82_926a_665f665ed84a.slice/crio-222815727c0b1de3587ee12420db959945f3080dc5998a37d2f591e168e97a16 WatchSource:0}: Error finding container 222815727c0b1de3587ee12420db959945f3080dc5998a37d2f591e168e97a16: Status 404 returned error can't find the container with id 222815727c0b1de3587ee12420db959945f3080dc5998a37d2f591e168e97a16 Feb 03 07:27:05 crc kubenswrapper[4708]: I0203 07:27:05.819490 4708 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fdec39a4-6222-4122-901f-4a6603afc348-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:05 crc kubenswrapper[4708]: I0203 07:27:05.819524 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ndjdn\" (UniqueName: \"kubernetes.io/projected/fdec39a4-6222-4122-901f-4a6603afc348-kube-api-access-ndjdn\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:05 crc kubenswrapper[4708]: I0203 07:27:05.819537 4708 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/fdec39a4-6222-4122-901f-4a6603afc348-etc-swift\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:05 crc kubenswrapper[4708]: I0203 07:27:05.819548 4708 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/fdec39a4-6222-4122-901f-4a6603afc348-swiftconf\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:05 crc kubenswrapper[4708]: I0203 07:27:05.819559 4708 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/fdec39a4-6222-4122-901f-4a6603afc348-dispersionconf\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:05 crc kubenswrapper[4708]: I0203 07:27:05.819569 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdec39a4-6222-4122-901f-4a6603afc348-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:05 crc kubenswrapper[4708]: I0203 07:27:05.980597 4708 util.go:48] "No ready sandbox for pod can be found. 
Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.028788 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vtkwj\" (UniqueName: \"kubernetes.io/projected/0f490b43-b9f5-4e99-89a1-e7c75a45b487-kube-api-access-vtkwj\") pod \"0f490b43-b9f5-4e99-89a1-e7c75a45b487\" (UID: \"0f490b43-b9f5-4e99-89a1-e7c75a45b487\") "
Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.028860 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f490b43-b9f5-4e99-89a1-e7c75a45b487-catalog-content\") pod \"0f490b43-b9f5-4e99-89a1-e7c75a45b487\" (UID: \"0f490b43-b9f5-4e99-89a1-e7c75a45b487\") "
Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.028991 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f490b43-b9f5-4e99-89a1-e7c75a45b487-utilities\") pod \"0f490b43-b9f5-4e99-89a1-e7c75a45b487\" (UID: \"0f490b43-b9f5-4e99-89a1-e7c75a45b487\") "
Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.030578 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f490b43-b9f5-4e99-89a1-e7c75a45b487-utilities" (OuterVolumeSpecName: "utilities") pod "0f490b43-b9f5-4e99-89a1-e7c75a45b487" (UID: "0f490b43-b9f5-4e99-89a1-e7c75a45b487"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.036553 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f490b43-b9f5-4e99-89a1-e7c75a45b487-kube-api-access-vtkwj" (OuterVolumeSpecName: "kube-api-access-vtkwj") pod "0f490b43-b9f5-4e99-89a1-e7c75a45b487" (UID: "0f490b43-b9f5-4e99-89a1-e7c75a45b487"). InnerVolumeSpecName "kube-api-access-vtkwj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.080914 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f490b43-b9f5-4e99-89a1-e7c75a45b487-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0f490b43-b9f5-4e99-89a1-e7c75a45b487" (UID: "0f490b43-b9f5-4e99-89a1-e7c75a45b487"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.103075 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="271e64f1-20a8-42ad-962a-0b498d561cdd" path="/var/lib/kubelet/pods/271e64f1-20a8-42ad-962a-0b498d561cdd/volumes"
Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.130772 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vtkwj\" (UniqueName: \"kubernetes.io/projected/0f490b43-b9f5-4e99-89a1-e7c75a45b487-kube-api-access-vtkwj\") on node \"crc\" DevicePath \"\""
Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.130892 4708 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f490b43-b9f5-4e99-89a1-e7c75a45b487-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.130906 4708 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f490b43-b9f5-4e99-89a1-e7c75a45b487-utilities\") on node \"crc\" DevicePath \"\""
Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.159603 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-9889-account-create-update-xfq6v" event={"ID":"7ecd1be3-40af-41af-bbc6-78a346f02c44","Type":"ContainerStarted","Data":"ccb5854ab49b90b36207a116c2ced1149c17ac39facd9a9186817db24360ab5d"}
Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.159653 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-9889-account-create-update-xfq6v" event={"ID":"7ecd1be3-40af-41af-bbc6-78a346f02c44","Type":"ContainerStarted","Data":"76e688295c8d576e889ab589ce51ae9370810e999cacf64f2ca95c9d4867d19d"}
Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.164358 4708 generic.go:334] "Generic (PLEG): container finished" podID="0f490b43-b9f5-4e99-89a1-e7c75a45b487" containerID="53a21fb701c637c26d897889cbe13537de1e20095607a20d3ae5cad0c880fc30" exitCode=0
Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.164437 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ppsk2" event={"ID":"0f490b43-b9f5-4e99-89a1-e7c75a45b487","Type":"ContainerDied","Data":"53a21fb701c637c26d897889cbe13537de1e20095607a20d3ae5cad0c880fc30"}
Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.164480 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ppsk2" event={"ID":"0f490b43-b9f5-4e99-89a1-e7c75a45b487","Type":"ContainerDied","Data":"ba5175defb60be7bc96caf3cc589c565bb0a02fce9e9f67f1c3b4ef0073cc233"}
Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.164505 4708 scope.go:117] "RemoveContainer" containerID="53a21fb701c637c26d897889cbe13537de1e20095607a20d3ae5cad0c880fc30"
Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.164667 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ppsk2"
Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.169085 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-zljlj" event={"ID":"fdec39a4-6222-4122-901f-4a6603afc348","Type":"ContainerDied","Data":"d6e108b77428f4041a914087f37a87cc499f750be6ef27a6b773dfe696cd67fd"}
Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.169111 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-zljlj"
Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.169121 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d6e108b77428f4041a914087f37a87cc499f750be6ef27a6b773dfe696cd67fd"
Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.173315 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-487ld" event={"ID":"acb52368-cf93-4c82-926a-665f665ed84a","Type":"ContainerStarted","Data":"f38feecda0106269e9d4981e28167e1c1768115c78013f575c511383f10dcc17"}
Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.173393 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-487ld" event={"ID":"acb52368-cf93-4c82-926a-665f665ed84a","Type":"ContainerStarted","Data":"222815727c0b1de3587ee12420db959945f3080dc5998a37d2f591e168e97a16"}
Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.187158 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-9889-account-create-update-xfq6v" podStartSLOduration=2.187141063 podStartE2EDuration="2.187141063s" podCreationTimestamp="2026-02-03 07:27:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:27:06.184600151 +0000 UTC m=+1005.166546968" watchObservedRunningTime="2026-02-03 07:27:06.187141063 +0000 UTC m=+1005.169087870"
Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.188255 4708 scope.go:117] "RemoveContainer" containerID="baa471322ac63f35c76c00b9f407a79eef1ab05e5057df14c759a92ae361f6ba"
Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.209942 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-create-487ld" podStartSLOduration=2.20991649 podStartE2EDuration="2.20991649s" podCreationTimestamp="2026-02-03 07:27:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:27:06.204492918 +0000 UTC m=+1005.186439725" watchObservedRunningTime="2026-02-03 07:27:06.20991649 +0000 UTC m=+1005.191863307"
Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.212728 4708 scope.go:117] "RemoveContainer" containerID="9d1de73993e1ef02b38c23e2492e31014eb1de722b8e486ff493e2cd0cec2588"
Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.224219 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ppsk2"]
Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.232072 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-ppsk2"]
Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.286495 4708 scope.go:117] "RemoveContainer" containerID="53a21fb701c637c26d897889cbe13537de1e20095607a20d3ae5cad0c880fc30"
Feb 03 07:27:06 crc kubenswrapper[4708]: E0203 07:27:06.286923 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53a21fb701c637c26d897889cbe13537de1e20095607a20d3ae5cad0c880fc30\": container with ID starting with 53a21fb701c637c26d897889cbe13537de1e20095607a20d3ae5cad0c880fc30 not found: ID does not exist" containerID="53a21fb701c637c26d897889cbe13537de1e20095607a20d3ae5cad0c880fc30"
Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.286972 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53a21fb701c637c26d897889cbe13537de1e20095607a20d3ae5cad0c880fc30"} err="failed to get container status \"53a21fb701c637c26d897889cbe13537de1e20095607a20d3ae5cad0c880fc30\": rpc error: code = NotFound desc = could not find container \"53a21fb701c637c26d897889cbe13537de1e20095607a20d3ae5cad0c880fc30\": container with ID starting with 53a21fb701c637c26d897889cbe13537de1e20095607a20d3ae5cad0c880fc30 not found: ID does not exist"
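The RemoveContainer / "ContainerStatus from runtime service failed ... NotFound" / "DeleteContainer returned error" sequences above and below are benign: the container is already gone, so the deletion has effectively succeeded. A sketch of the NotFound-tolerant pattern, with a stand-in CRI client (a real client would be a gRPC RuntimeService stub and would inspect the returned status code):

package main

import (
	"errors"
	"fmt"
)

// errNotFound stands in for a gRPC NotFound status from the runtime; real
// code would inspect the status code of the returned error instead.
var errNotFound = errors.New("rpc error: code = NotFound desc = could not find container")

type criClient interface {
	ContainerStatus(id string) error
	RemoveContainer(id string) error
}

// removeContainer mirrors the log sequence: look the container up first and
// treat NotFound as success, so repeated cleanup passes stay idempotent.
func removeContainer(c criClient, id string) error {
	if err := c.ContainerStatus(id); err != nil {
		if errors.Is(err, errNotFound) {
			fmt.Printf("container %s already gone; nothing to delete\n", id)
			return nil
		}
		return err
	}
	return c.RemoveContainer(id)
}

// fakeCRI simulates a runtime that has already removed the container.
type fakeCRI struct{}

func (fakeCRI) ContainerStatus(string) error { return errNotFound }
func (fakeCRI) RemoveContainer(string) error { return nil }

func main() {
	_ = removeContainer(fakeCRI{}, "example-container-id")
}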
containerID={"Type":"cri-o","ID":"53a21fb701c637c26d897889cbe13537de1e20095607a20d3ae5cad0c880fc30"} err="failed to get container status \"53a21fb701c637c26d897889cbe13537de1e20095607a20d3ae5cad0c880fc30\": rpc error: code = NotFound desc = could not find container \"53a21fb701c637c26d897889cbe13537de1e20095607a20d3ae5cad0c880fc30\": container with ID starting with 53a21fb701c637c26d897889cbe13537de1e20095607a20d3ae5cad0c880fc30 not found: ID does not exist" Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.287014 4708 scope.go:117] "RemoveContainer" containerID="baa471322ac63f35c76c00b9f407a79eef1ab05e5057df14c759a92ae361f6ba" Feb 03 07:27:06 crc kubenswrapper[4708]: E0203 07:27:06.287352 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"baa471322ac63f35c76c00b9f407a79eef1ab05e5057df14c759a92ae361f6ba\": container with ID starting with baa471322ac63f35c76c00b9f407a79eef1ab05e5057df14c759a92ae361f6ba not found: ID does not exist" containerID="baa471322ac63f35c76c00b9f407a79eef1ab05e5057df14c759a92ae361f6ba" Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.287387 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"baa471322ac63f35c76c00b9f407a79eef1ab05e5057df14c759a92ae361f6ba"} err="failed to get container status \"baa471322ac63f35c76c00b9f407a79eef1ab05e5057df14c759a92ae361f6ba\": rpc error: code = NotFound desc = could not find container \"baa471322ac63f35c76c00b9f407a79eef1ab05e5057df14c759a92ae361f6ba\": container with ID starting with baa471322ac63f35c76c00b9f407a79eef1ab05e5057df14c759a92ae361f6ba not found: ID does not exist" Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.287413 4708 scope.go:117] "RemoveContainer" containerID="9d1de73993e1ef02b38c23e2492e31014eb1de722b8e486ff493e2cd0cec2588" Feb 03 07:27:06 crc kubenswrapper[4708]: E0203 07:27:06.287621 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d1de73993e1ef02b38c23e2492e31014eb1de722b8e486ff493e2cd0cec2588\": container with ID starting with 9d1de73993e1ef02b38c23e2492e31014eb1de722b8e486ff493e2cd0cec2588 not found: ID does not exist" containerID="9d1de73993e1ef02b38c23e2492e31014eb1de722b8e486ff493e2cd0cec2588" Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.287641 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d1de73993e1ef02b38c23e2492e31014eb1de722b8e486ff493e2cd0cec2588"} err="failed to get container status \"9d1de73993e1ef02b38c23e2492e31014eb1de722b8e486ff493e2cd0cec2588\": rpc error: code = NotFound desc = could not find container \"9d1de73993e1ef02b38c23e2492e31014eb1de722b8e486ff493e2cd0cec2588\": container with ID starting with 9d1de73993e1ef02b38c23e2492e31014eb1de722b8e486ff493e2cd0cec2588 not found: ID does not exist" Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.473206 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-rcz7k"] Feb 03 07:27:06 crc kubenswrapper[4708]: E0203 07:27:06.473963 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdec39a4-6222-4122-901f-4a6603afc348" containerName="swift-ring-rebalance" Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.474034 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdec39a4-6222-4122-901f-4a6603afc348" containerName="swift-ring-rebalance" Feb 03 07:27:06 crc kubenswrapper[4708]: 
E0203 07:27:06.475951 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f490b43-b9f5-4e99-89a1-e7c75a45b487" containerName="extract-utilities" Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.475992 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f490b43-b9f5-4e99-89a1-e7c75a45b487" containerName="extract-utilities" Feb 03 07:27:06 crc kubenswrapper[4708]: E0203 07:27:06.476017 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f490b43-b9f5-4e99-89a1-e7c75a45b487" containerName="registry-server" Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.476025 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f490b43-b9f5-4e99-89a1-e7c75a45b487" containerName="registry-server" Feb 03 07:27:06 crc kubenswrapper[4708]: E0203 07:27:06.476059 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f490b43-b9f5-4e99-89a1-e7c75a45b487" containerName="extract-content" Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.476068 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f490b43-b9f5-4e99-89a1-e7c75a45b487" containerName="extract-content" Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.476403 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f490b43-b9f5-4e99-89a1-e7c75a45b487" containerName="registry-server" Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.476428 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdec39a4-6222-4122-901f-4a6603afc348" containerName="swift-ring-rebalance" Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.477120 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-rcz7k" Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.479760 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret" Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.500510 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-rcz7k"] Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.536879 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5606ed85-307e-4896-b509-f4fee8359589-operator-scripts\") pod \"root-account-create-update-rcz7k\" (UID: \"5606ed85-307e-4896-b509-f4fee8359589\") " pod="openstack/root-account-create-update-rcz7k" Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.537055 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lx74j\" (UniqueName: \"kubernetes.io/projected/5606ed85-307e-4896-b509-f4fee8359589-kube-api-access-lx74j\") pod \"root-account-create-update-rcz7k\" (UID: \"5606ed85-307e-4896-b509-f4fee8359589\") " pod="openstack/root-account-create-update-rcz7k" Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.638193 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5606ed85-307e-4896-b509-f4fee8359589-operator-scripts\") pod \"root-account-create-update-rcz7k\" (UID: \"5606ed85-307e-4896-b509-f4fee8359589\") " pod="openstack/root-account-create-update-rcz7k" Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.638282 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lx74j\" (UniqueName: 
\"kubernetes.io/projected/5606ed85-307e-4896-b509-f4fee8359589-kube-api-access-lx74j\") pod \"root-account-create-update-rcz7k\" (UID: \"5606ed85-307e-4896-b509-f4fee8359589\") " pod="openstack/root-account-create-update-rcz7k" Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.639301 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5606ed85-307e-4896-b509-f4fee8359589-operator-scripts\") pod \"root-account-create-update-rcz7k\" (UID: \"5606ed85-307e-4896-b509-f4fee8359589\") " pod="openstack/root-account-create-update-rcz7k" Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.659735 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lx74j\" (UniqueName: \"kubernetes.io/projected/5606ed85-307e-4896-b509-f4fee8359589-kube-api-access-lx74j\") pod \"root-account-create-update-rcz7k\" (UID: \"5606ed85-307e-4896-b509-f4fee8359589\") " pod="openstack/root-account-create-update-rcz7k" Feb 03 07:27:06 crc kubenswrapper[4708]: I0203 07:27:06.791994 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-rcz7k" Feb 03 07:27:07 crc kubenswrapper[4708]: I0203 07:27:07.181323 4708 generic.go:334] "Generic (PLEG): container finished" podID="acb52368-cf93-4c82-926a-665f665ed84a" containerID="f38feecda0106269e9d4981e28167e1c1768115c78013f575c511383f10dcc17" exitCode=0 Feb 03 07:27:07 crc kubenswrapper[4708]: I0203 07:27:07.181475 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-487ld" event={"ID":"acb52368-cf93-4c82-926a-665f665ed84a","Type":"ContainerDied","Data":"f38feecda0106269e9d4981e28167e1c1768115c78013f575c511383f10dcc17"} Feb 03 07:27:07 crc kubenswrapper[4708]: I0203 07:27:07.184664 4708 generic.go:334] "Generic (PLEG): container finished" podID="7ecd1be3-40af-41af-bbc6-78a346f02c44" containerID="ccb5854ab49b90b36207a116c2ced1149c17ac39facd9a9186817db24360ab5d" exitCode=0 Feb 03 07:27:07 crc kubenswrapper[4708]: I0203 07:27:07.184748 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-9889-account-create-update-xfq6v" event={"ID":"7ecd1be3-40af-41af-bbc6-78a346f02c44","Type":"ContainerDied","Data":"ccb5854ab49b90b36207a116c2ced1149c17ac39facd9a9186817db24360ab5d"} Feb 03 07:27:07 crc kubenswrapper[4708]: I0203 07:27:07.248402 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-rcz7k"] Feb 03 07:27:08 crc kubenswrapper[4708]: I0203 07:27:08.113342 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f490b43-b9f5-4e99-89a1-e7c75a45b487" path="/var/lib/kubelet/pods/0f490b43-b9f5-4e99-89a1-e7c75a45b487/volumes" Feb 03 07:27:08 crc kubenswrapper[4708]: I0203 07:27:08.194752 4708 generic.go:334] "Generic (PLEG): container finished" podID="5606ed85-307e-4896-b509-f4fee8359589" containerID="963007256d1b21421042649674668b2a3384dfd620dd3734b27ce160a1bd0d6c" exitCode=0 Feb 03 07:27:08 crc kubenswrapper[4708]: I0203 07:27:08.195174 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-rcz7k" event={"ID":"5606ed85-307e-4896-b509-f4fee8359589","Type":"ContainerDied","Data":"963007256d1b21421042649674668b2a3384dfd620dd3734b27ce160a1bd0d6c"} Feb 03 07:27:08 crc kubenswrapper[4708]: I0203 07:27:08.195197 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-rcz7k" 
event={"ID":"5606ed85-307e-4896-b509-f4fee8359589","Type":"ContainerStarted","Data":"53c83a30a489855044a459c1c090a99e252fc9d1b5625fc77384cf4fd8e686d6"} Feb 03 07:27:08 crc kubenswrapper[4708]: I0203 07:27:08.678931 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-487ld" Feb 03 07:27:08 crc kubenswrapper[4708]: I0203 07:27:08.685632 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-9889-account-create-update-xfq6v" Feb 03 07:27:08 crc kubenswrapper[4708]: I0203 07:27:08.773596 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/acb52368-cf93-4c82-926a-665f665ed84a-operator-scripts\") pod \"acb52368-cf93-4c82-926a-665f665ed84a\" (UID: \"acb52368-cf93-4c82-926a-665f665ed84a\") " Feb 03 07:27:08 crc kubenswrapper[4708]: I0203 07:27:08.773786 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9glg5\" (UniqueName: \"kubernetes.io/projected/7ecd1be3-40af-41af-bbc6-78a346f02c44-kube-api-access-9glg5\") pod \"7ecd1be3-40af-41af-bbc6-78a346f02c44\" (UID: \"7ecd1be3-40af-41af-bbc6-78a346f02c44\") " Feb 03 07:27:08 crc kubenswrapper[4708]: I0203 07:27:08.773890 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d597s\" (UniqueName: \"kubernetes.io/projected/acb52368-cf93-4c82-926a-665f665ed84a-kube-api-access-d597s\") pod \"acb52368-cf93-4c82-926a-665f665ed84a\" (UID: \"acb52368-cf93-4c82-926a-665f665ed84a\") " Feb 03 07:27:08 crc kubenswrapper[4708]: I0203 07:27:08.773918 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ecd1be3-40af-41af-bbc6-78a346f02c44-operator-scripts\") pod \"7ecd1be3-40af-41af-bbc6-78a346f02c44\" (UID: \"7ecd1be3-40af-41af-bbc6-78a346f02c44\") " Feb 03 07:27:08 crc kubenswrapper[4708]: I0203 07:27:08.774290 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/acb52368-cf93-4c82-926a-665f665ed84a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "acb52368-cf93-4c82-926a-665f665ed84a" (UID: "acb52368-cf93-4c82-926a-665f665ed84a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:08 crc kubenswrapper[4708]: I0203 07:27:08.774560 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ecd1be3-40af-41af-bbc6-78a346f02c44-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7ecd1be3-40af-41af-bbc6-78a346f02c44" (UID: "7ecd1be3-40af-41af-bbc6-78a346f02c44"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:08 crc kubenswrapper[4708]: I0203 07:27:08.774816 4708 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ecd1be3-40af-41af-bbc6-78a346f02c44-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:08 crc kubenswrapper[4708]: I0203 07:27:08.774835 4708 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/acb52368-cf93-4c82-926a-665f665ed84a-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:08 crc kubenswrapper[4708]: I0203 07:27:08.780891 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ecd1be3-40af-41af-bbc6-78a346f02c44-kube-api-access-9glg5" (OuterVolumeSpecName: "kube-api-access-9glg5") pod "7ecd1be3-40af-41af-bbc6-78a346f02c44" (UID: "7ecd1be3-40af-41af-bbc6-78a346f02c44"). InnerVolumeSpecName "kube-api-access-9glg5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:27:08 crc kubenswrapper[4708]: I0203 07:27:08.781637 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/acb52368-cf93-4c82-926a-665f665ed84a-kube-api-access-d597s" (OuterVolumeSpecName: "kube-api-access-d597s") pod "acb52368-cf93-4c82-926a-665f665ed84a" (UID: "acb52368-cf93-4c82-926a-665f665ed84a"). InnerVolumeSpecName "kube-api-access-d597s". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:27:08 crc kubenswrapper[4708]: I0203 07:27:08.876449 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9glg5\" (UniqueName: \"kubernetes.io/projected/7ecd1be3-40af-41af-bbc6-78a346f02c44-kube-api-access-9glg5\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:08 crc kubenswrapper[4708]: I0203 07:27:08.876642 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d597s\" (UniqueName: \"kubernetes.io/projected/acb52368-cf93-4c82-926a-665f665ed84a-kube-api-access-d597s\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.129401 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-fjkqx"] Feb 03 07:27:09 crc kubenswrapper[4708]: E0203 07:27:09.129817 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ecd1be3-40af-41af-bbc6-78a346f02c44" containerName="mariadb-account-create-update" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.129839 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ecd1be3-40af-41af-bbc6-78a346f02c44" containerName="mariadb-account-create-update" Feb 03 07:27:09 crc kubenswrapper[4708]: E0203 07:27:09.129876 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acb52368-cf93-4c82-926a-665f665ed84a" containerName="mariadb-database-create" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.129886 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="acb52368-cf93-4c82-926a-665f665ed84a" containerName="mariadb-database-create" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.130055 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ecd1be3-40af-41af-bbc6-78a346f02c44" containerName="mariadb-account-create-update" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.130080 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="acb52368-cf93-4c82-926a-665f665ed84a" containerName="mariadb-database-create" Feb 03 07:27:09 crc kubenswrapper[4708]: 
I0203 07:27:09.130632 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-fjkqx" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.146177 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-fjkqx"] Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.181090 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kz4cq\" (UniqueName: \"kubernetes.io/projected/43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2-kube-api-access-kz4cq\") pod \"keystone-db-create-fjkqx\" (UID: \"43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2\") " pod="openstack/keystone-db-create-fjkqx" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.181222 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2-operator-scripts\") pod \"keystone-db-create-fjkqx\" (UID: \"43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2\") " pod="openstack/keystone-db-create-fjkqx" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.206044 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-487ld" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.209974 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-487ld" event={"ID":"acb52368-cf93-4c82-926a-665f665ed84a","Type":"ContainerDied","Data":"222815727c0b1de3587ee12420db959945f3080dc5998a37d2f591e168e97a16"} Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.210027 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="222815727c0b1de3587ee12420db959945f3080dc5998a37d2f591e168e97a16" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.213495 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-9889-account-create-update-xfq6v" event={"ID":"7ecd1be3-40af-41af-bbc6-78a346f02c44","Type":"ContainerDied","Data":"76e688295c8d576e889ab589ce51ae9370810e999cacf64f2ca95c9d4867d19d"} Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.213540 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="76e688295c8d576e889ab589ce51ae9370810e999cacf64f2ca95c9d4867d19d" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.213542 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-9889-account-create-update-xfq6v" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.215270 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-5956-account-create-update-tvxq4"] Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.216204 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-5956-account-create-update-tvxq4" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.230199 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.267761 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5956-account-create-update-tvxq4"] Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.283178 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2-operator-scripts\") pod \"keystone-db-create-fjkqx\" (UID: \"43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2\") " pod="openstack/keystone-db-create-fjkqx" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.283347 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9csg\" (UniqueName: \"kubernetes.io/projected/028be7ae-713a-49b6-9f35-930e0016d066-kube-api-access-h9csg\") pod \"keystone-5956-account-create-update-tvxq4\" (UID: \"028be7ae-713a-49b6-9f35-930e0016d066\") " pod="openstack/keystone-5956-account-create-update-tvxq4" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.283410 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kz4cq\" (UniqueName: \"kubernetes.io/projected/43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2-kube-api-access-kz4cq\") pod \"keystone-db-create-fjkqx\" (UID: \"43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2\") " pod="openstack/keystone-db-create-fjkqx" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.283450 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/028be7ae-713a-49b6-9f35-930e0016d066-operator-scripts\") pod \"keystone-5956-account-create-update-tvxq4\" (UID: \"028be7ae-713a-49b6-9f35-930e0016d066\") " pod="openstack/keystone-5956-account-create-update-tvxq4" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.283891 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2-operator-scripts\") pod \"keystone-db-create-fjkqx\" (UID: \"43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2\") " pod="openstack/keystone-db-create-fjkqx" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.299415 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kz4cq\" (UniqueName: \"kubernetes.io/projected/43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2-kube-api-access-kz4cq\") pod \"keystone-db-create-fjkqx\" (UID: \"43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2\") " pod="openstack/keystone-db-create-fjkqx" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.384886 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9csg\" (UniqueName: \"kubernetes.io/projected/028be7ae-713a-49b6-9f35-930e0016d066-kube-api-access-h9csg\") pod \"keystone-5956-account-create-update-tvxq4\" (UID: \"028be7ae-713a-49b6-9f35-930e0016d066\") " pod="openstack/keystone-5956-account-create-update-tvxq4" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.384958 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/028be7ae-713a-49b6-9f35-930e0016d066-operator-scripts\") pod 
\"keystone-5956-account-create-update-tvxq4\" (UID: \"028be7ae-713a-49b6-9f35-930e0016d066\") " pod="openstack/keystone-5956-account-create-update-tvxq4" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.385719 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/028be7ae-713a-49b6-9f35-930e0016d066-operator-scripts\") pod \"keystone-5956-account-create-update-tvxq4\" (UID: \"028be7ae-713a-49b6-9f35-930e0016d066\") " pod="openstack/keystone-5956-account-create-update-tvxq4" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.404586 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9csg\" (UniqueName: \"kubernetes.io/projected/028be7ae-713a-49b6-9f35-930e0016d066-kube-api-access-h9csg\") pod \"keystone-5956-account-create-update-tvxq4\" (UID: \"028be7ae-713a-49b6-9f35-930e0016d066\") " pod="openstack/keystone-5956-account-create-update-tvxq4" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.452087 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-vsnx7"] Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.454221 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-fjkqx" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.457741 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-vsnx7" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.467457 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-vsnx7"] Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.500972 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-rcz7k" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.544852 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5956-account-create-update-tvxq4" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.588820 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lx74j\" (UniqueName: \"kubernetes.io/projected/5606ed85-307e-4896-b509-f4fee8359589-kube-api-access-lx74j\") pod \"5606ed85-307e-4896-b509-f4fee8359589\" (UID: \"5606ed85-307e-4896-b509-f4fee8359589\") " Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.589105 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5606ed85-307e-4896-b509-f4fee8359589-operator-scripts\") pod \"5606ed85-307e-4896-b509-f4fee8359589\" (UID: \"5606ed85-307e-4896-b509-f4fee8359589\") " Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.589468 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5606ed85-307e-4896-b509-f4fee8359589-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5606ed85-307e-4896-b509-f4fee8359589" (UID: "5606ed85-307e-4896-b509-f4fee8359589"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.589743 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfqn5\" (UniqueName: \"kubernetes.io/projected/c944aa48-f235-469e-8513-106dab7e315a-kube-api-access-kfqn5\") pod \"placement-db-create-vsnx7\" (UID: \"c944aa48-f235-469e-8513-106dab7e315a\") " pod="openstack/placement-db-create-vsnx7" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.589852 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c944aa48-f235-469e-8513-106dab7e315a-operator-scripts\") pod \"placement-db-create-vsnx7\" (UID: \"c944aa48-f235-469e-8513-106dab7e315a\") " pod="openstack/placement-db-create-vsnx7" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.589938 4708 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5606ed85-307e-4896-b509-f4fee8359589-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.604575 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5606ed85-307e-4896-b509-f4fee8359589-kube-api-access-lx74j" (OuterVolumeSpecName: "kube-api-access-lx74j") pod "5606ed85-307e-4896-b509-f4fee8359589" (UID: "5606ed85-307e-4896-b509-f4fee8359589"). InnerVolumeSpecName "kube-api-access-lx74j". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.623910 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-07b1-account-create-update-xsnff"] Feb 03 07:27:09 crc kubenswrapper[4708]: E0203 07:27:09.624383 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5606ed85-307e-4896-b509-f4fee8359589" containerName="mariadb-account-create-update" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.624398 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="5606ed85-307e-4896-b509-f4fee8359589" containerName="mariadb-account-create-update" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.624552 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="5606ed85-307e-4896-b509-f4fee8359589" containerName="mariadb-account-create-update" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.625093 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-07b1-account-create-update-xsnff" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.637463 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.639559 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-pb4xp" podUID="3b5a2d58-5ebb-4838-a798-bc280fe99951" containerName="ovn-controller" probeResult="failure" output=< Feb 03 07:27:09 crc kubenswrapper[4708]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Feb 03 07:27:09 crc kubenswrapper[4708]: > Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.643715 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-07b1-account-create-update-xsnff"] Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.691552 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c944aa48-f235-469e-8513-106dab7e315a-operator-scripts\") pod \"placement-db-create-vsnx7\" (UID: \"c944aa48-f235-469e-8513-106dab7e315a\") " pod="openstack/placement-db-create-vsnx7" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.691622 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9tkd4\" (UniqueName: \"kubernetes.io/projected/4d163e5d-5089-42c1-b1d5-12960d6da873-kube-api-access-9tkd4\") pod \"placement-07b1-account-create-update-xsnff\" (UID: \"4d163e5d-5089-42c1-b1d5-12960d6da873\") " pod="openstack/placement-07b1-account-create-update-xsnff" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.691692 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4d163e5d-5089-42c1-b1d5-12960d6da873-operator-scripts\") pod \"placement-07b1-account-create-update-xsnff\" (UID: \"4d163e5d-5089-42c1-b1d5-12960d6da873\") " pod="openstack/placement-07b1-account-create-update-xsnff" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.691725 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfqn5\" (UniqueName: \"kubernetes.io/projected/c944aa48-f235-469e-8513-106dab7e315a-kube-api-access-kfqn5\") pod \"placement-db-create-vsnx7\" (UID: \"c944aa48-f235-469e-8513-106dab7e315a\") " pod="openstack/placement-db-create-vsnx7" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.691808 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lx74j\" (UniqueName: \"kubernetes.io/projected/5606ed85-307e-4896-b509-f4fee8359589-kube-api-access-lx74j\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.692273 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c944aa48-f235-469e-8513-106dab7e315a-operator-scripts\") pod \"placement-db-create-vsnx7\" (UID: \"c944aa48-f235-469e-8513-106dab7e315a\") " pod="openstack/placement-db-create-vsnx7" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.714445 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfqn5\" (UniqueName: \"kubernetes.io/projected/c944aa48-f235-469e-8513-106dab7e315a-kube-api-access-kfqn5\") pod \"placement-db-create-vsnx7\" (UID: \"c944aa48-f235-469e-8513-106dab7e315a\") " 
pod="openstack/placement-db-create-vsnx7" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.774298 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-vsnx7" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.793140 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9tkd4\" (UniqueName: \"kubernetes.io/projected/4d163e5d-5089-42c1-b1d5-12960d6da873-kube-api-access-9tkd4\") pod \"placement-07b1-account-create-update-xsnff\" (UID: \"4d163e5d-5089-42c1-b1d5-12960d6da873\") " pod="openstack/placement-07b1-account-create-update-xsnff" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.793240 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4d163e5d-5089-42c1-b1d5-12960d6da873-operator-scripts\") pod \"placement-07b1-account-create-update-xsnff\" (UID: \"4d163e5d-5089-42c1-b1d5-12960d6da873\") " pod="openstack/placement-07b1-account-create-update-xsnff" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.794118 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4d163e5d-5089-42c1-b1d5-12960d6da873-operator-scripts\") pod \"placement-07b1-account-create-update-xsnff\" (UID: \"4d163e5d-5089-42c1-b1d5-12960d6da873\") " pod="openstack/placement-07b1-account-create-update-xsnff" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.811875 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9tkd4\" (UniqueName: \"kubernetes.io/projected/4d163e5d-5089-42c1-b1d5-12960d6da873-kube-api-access-9tkd4\") pod \"placement-07b1-account-create-update-xsnff\" (UID: \"4d163e5d-5089-42c1-b1d5-12960d6da873\") " pod="openstack/placement-07b1-account-create-update-xsnff" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.957525 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-fjkqx"] Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.969245 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-07b1-account-create-update-xsnff" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.995316 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-t95nr"] Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.996403 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-t95nr" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.998501 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Feb 03 07:27:09 crc kubenswrapper[4708]: I0203 07:27:09.999260 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-rwpwp" Feb 03 07:27:10 crc kubenswrapper[4708]: I0203 07:27:10.007555 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-t95nr"] Feb 03 07:27:10 crc kubenswrapper[4708]: I0203 07:27:10.099479 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6201bbf3-523c-4a64-9703-fb0adbc0955a-config-data\") pod \"glance-db-sync-t95nr\" (UID: \"6201bbf3-523c-4a64-9703-fb0adbc0955a\") " pod="openstack/glance-db-sync-t95nr" Feb 03 07:27:10 crc kubenswrapper[4708]: I0203 07:27:10.099553 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jsdhc\" (UniqueName: \"kubernetes.io/projected/6201bbf3-523c-4a64-9703-fb0adbc0955a-kube-api-access-jsdhc\") pod \"glance-db-sync-t95nr\" (UID: \"6201bbf3-523c-4a64-9703-fb0adbc0955a\") " pod="openstack/glance-db-sync-t95nr" Feb 03 07:27:10 crc kubenswrapper[4708]: I0203 07:27:10.099966 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6201bbf3-523c-4a64-9703-fb0adbc0955a-db-sync-config-data\") pod \"glance-db-sync-t95nr\" (UID: \"6201bbf3-523c-4a64-9703-fb0adbc0955a\") " pod="openstack/glance-db-sync-t95nr" Feb 03 07:27:10 crc kubenswrapper[4708]: I0203 07:27:10.100010 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6201bbf3-523c-4a64-9703-fb0adbc0955a-combined-ca-bundle\") pod \"glance-db-sync-t95nr\" (UID: \"6201bbf3-523c-4a64-9703-fb0adbc0955a\") " pod="openstack/glance-db-sync-t95nr" Feb 03 07:27:10 crc kubenswrapper[4708]: I0203 07:27:10.118066 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5956-account-create-update-tvxq4"] Feb 03 07:27:10 crc kubenswrapper[4708]: I0203 07:27:10.201315 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6201bbf3-523c-4a64-9703-fb0adbc0955a-db-sync-config-data\") pod \"glance-db-sync-t95nr\" (UID: \"6201bbf3-523c-4a64-9703-fb0adbc0955a\") " pod="openstack/glance-db-sync-t95nr" Feb 03 07:27:10 crc kubenswrapper[4708]: I0203 07:27:10.201364 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6201bbf3-523c-4a64-9703-fb0adbc0955a-combined-ca-bundle\") pod \"glance-db-sync-t95nr\" (UID: \"6201bbf3-523c-4a64-9703-fb0adbc0955a\") " pod="openstack/glance-db-sync-t95nr" Feb 03 07:27:10 crc kubenswrapper[4708]: I0203 07:27:10.201416 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6201bbf3-523c-4a64-9703-fb0adbc0955a-config-data\") pod \"glance-db-sync-t95nr\" (UID: \"6201bbf3-523c-4a64-9703-fb0adbc0955a\") " pod="openstack/glance-db-sync-t95nr" Feb 03 07:27:10 crc kubenswrapper[4708]: I0203 07:27:10.201471 4708 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-jsdhc\" (UniqueName: \"kubernetes.io/projected/6201bbf3-523c-4a64-9703-fb0adbc0955a-kube-api-access-jsdhc\") pod \"glance-db-sync-t95nr\" (UID: \"6201bbf3-523c-4a64-9703-fb0adbc0955a\") " pod="openstack/glance-db-sync-t95nr" Feb 03 07:27:10 crc kubenswrapper[4708]: I0203 07:27:10.206299 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6201bbf3-523c-4a64-9703-fb0adbc0955a-db-sync-config-data\") pod \"glance-db-sync-t95nr\" (UID: \"6201bbf3-523c-4a64-9703-fb0adbc0955a\") " pod="openstack/glance-db-sync-t95nr" Feb 03 07:27:10 crc kubenswrapper[4708]: I0203 07:27:10.206348 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6201bbf3-523c-4a64-9703-fb0adbc0955a-config-data\") pod \"glance-db-sync-t95nr\" (UID: \"6201bbf3-523c-4a64-9703-fb0adbc0955a\") " pod="openstack/glance-db-sync-t95nr" Feb 03 07:27:10 crc kubenswrapper[4708]: I0203 07:27:10.207934 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6201bbf3-523c-4a64-9703-fb0adbc0955a-combined-ca-bundle\") pod \"glance-db-sync-t95nr\" (UID: \"6201bbf3-523c-4a64-9703-fb0adbc0955a\") " pod="openstack/glance-db-sync-t95nr" Feb 03 07:27:10 crc kubenswrapper[4708]: I0203 07:27:10.221460 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jsdhc\" (UniqueName: \"kubernetes.io/projected/6201bbf3-523c-4a64-9703-fb0adbc0955a-kube-api-access-jsdhc\") pod \"glance-db-sync-t95nr\" (UID: \"6201bbf3-523c-4a64-9703-fb0adbc0955a\") " pod="openstack/glance-db-sync-t95nr" Feb 03 07:27:10 crc kubenswrapper[4708]: I0203 07:27:10.231461 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-rcz7k" event={"ID":"5606ed85-307e-4896-b509-f4fee8359589","Type":"ContainerDied","Data":"53c83a30a489855044a459c1c090a99e252fc9d1b5625fc77384cf4fd8e686d6"} Feb 03 07:27:10 crc kubenswrapper[4708]: I0203 07:27:10.231498 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="53c83a30a489855044a459c1c090a99e252fc9d1b5625fc77384cf4fd8e686d6" Feb 03 07:27:10 crc kubenswrapper[4708]: I0203 07:27:10.232083 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-rcz7k" Feb 03 07:27:10 crc kubenswrapper[4708]: I0203 07:27:10.243017 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-fjkqx" event={"ID":"43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2","Type":"ContainerStarted","Data":"cfbfab33c2a357b80e6ef389f7f25575f2dce6df3b653ce89303ee8450ba9dba"} Feb 03 07:27:10 crc kubenswrapper[4708]: I0203 07:27:10.249698 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5956-account-create-update-tvxq4" event={"ID":"028be7ae-713a-49b6-9f35-930e0016d066","Type":"ContainerStarted","Data":"b244d6b1735a45ece62ae62310a40bc1e8264225ba64bf052189d373127b0193"} Feb 03 07:27:10 crc kubenswrapper[4708]: I0203 07:27:10.259291 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-vsnx7"] Feb 03 07:27:10 crc kubenswrapper[4708]: W0203 07:27:10.276393 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc944aa48_f235_469e_8513_106dab7e315a.slice/crio-f8c3270420307159a38e60516f4414852b444dd6b62a2b88dd76d20440c9bb62 WatchSource:0}: Error finding container f8c3270420307159a38e60516f4414852b444dd6b62a2b88dd76d20440c9bb62: Status 404 returned error can't find the container with id f8c3270420307159a38e60516f4414852b444dd6b62a2b88dd76d20440c9bb62 Feb 03 07:27:10 crc kubenswrapper[4708]: I0203 07:27:10.316476 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-t95nr" Feb 03 07:27:10 crc kubenswrapper[4708]: I0203 07:27:10.463086 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-07b1-account-create-update-xsnff"] Feb 03 07:27:10 crc kubenswrapper[4708]: I0203 07:27:10.816253 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-t95nr"] Feb 03 07:27:11 crc kubenswrapper[4708]: I0203 07:27:11.258473 4708 generic.go:334] "Generic (PLEG): container finished" podID="43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2" containerID="2fb83885277b3edaa15fad24d83a064545239de33c1848376dfa7456d2ace340" exitCode=0 Feb 03 07:27:11 crc kubenswrapper[4708]: I0203 07:27:11.258556 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-fjkqx" event={"ID":"43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2","Type":"ContainerDied","Data":"2fb83885277b3edaa15fad24d83a064545239de33c1848376dfa7456d2ace340"} Feb 03 07:27:11 crc kubenswrapper[4708]: I0203 07:27:11.260213 4708 generic.go:334] "Generic (PLEG): container finished" podID="028be7ae-713a-49b6-9f35-930e0016d066" containerID="ab1eaab86bc10e9ddd04d713b0eec8f2a943dfde93409e9ace600168426d7e6b" exitCode=0 Feb 03 07:27:11 crc kubenswrapper[4708]: I0203 07:27:11.260272 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5956-account-create-update-tvxq4" event={"ID":"028be7ae-713a-49b6-9f35-930e0016d066","Type":"ContainerDied","Data":"ab1eaab86bc10e9ddd04d713b0eec8f2a943dfde93409e9ace600168426d7e6b"} Feb 03 07:27:11 crc kubenswrapper[4708]: I0203 07:27:11.262312 4708 generic.go:334] "Generic (PLEG): container finished" podID="c944aa48-f235-469e-8513-106dab7e315a" containerID="601d94218f3648bfef7b7b2fe2467ec80482bffa67a539703c2f12fcbde7952e" exitCode=0 Feb 03 07:27:11 crc kubenswrapper[4708]: I0203 07:27:11.262389 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-vsnx7" 
event={"ID":"c944aa48-f235-469e-8513-106dab7e315a","Type":"ContainerDied","Data":"601d94218f3648bfef7b7b2fe2467ec80482bffa67a539703c2f12fcbde7952e"} Feb 03 07:27:11 crc kubenswrapper[4708]: I0203 07:27:11.262408 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-vsnx7" event={"ID":"c944aa48-f235-469e-8513-106dab7e315a","Type":"ContainerStarted","Data":"f8c3270420307159a38e60516f4414852b444dd6b62a2b88dd76d20440c9bb62"} Feb 03 07:27:11 crc kubenswrapper[4708]: I0203 07:27:11.264017 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-07b1-account-create-update-xsnff" event={"ID":"4d163e5d-5089-42c1-b1d5-12960d6da873","Type":"ContainerStarted","Data":"ef4a3446e7aed57ee1ee3a52484424faf66f3e43b1c425d55cd94ae2f2184c3b"} Feb 03 07:27:11 crc kubenswrapper[4708]: I0203 07:27:11.265036 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-t95nr" event={"ID":"6201bbf3-523c-4a64-9703-fb0adbc0955a","Type":"ContainerStarted","Data":"dd6f5c040aab417cac2106e0ca5fe7032e28098452f8ce35cc1ac0cb1ea91f3c"} Feb 03 07:27:12 crc kubenswrapper[4708]: I0203 07:27:12.126686 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Feb 03 07:27:12 crc kubenswrapper[4708]: I0203 07:27:12.274573 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-07b1-account-create-update-xsnff" event={"ID":"4d163e5d-5089-42c1-b1d5-12960d6da873","Type":"ContainerStarted","Data":"ad977a123df838fec12ca9019254b0cc66a69bf9a2abf58c4a0ddb3bc6a611f7"} Feb 03 07:27:12 crc kubenswrapper[4708]: I0203 07:27:12.706579 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5956-account-create-update-tvxq4" Feb 03 07:27:12 crc kubenswrapper[4708]: I0203 07:27:12.848241 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-fjkqx" Feb 03 07:27:12 crc kubenswrapper[4708]: I0203 07:27:12.854822 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-vsnx7" Feb 03 07:27:12 crc kubenswrapper[4708]: I0203 07:27:12.883323 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/028be7ae-713a-49b6-9f35-930e0016d066-operator-scripts\") pod \"028be7ae-713a-49b6-9f35-930e0016d066\" (UID: \"028be7ae-713a-49b6-9f35-930e0016d066\") " Feb 03 07:27:12 crc kubenswrapper[4708]: I0203 07:27:12.883514 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h9csg\" (UniqueName: \"kubernetes.io/projected/028be7ae-713a-49b6-9f35-930e0016d066-kube-api-access-h9csg\") pod \"028be7ae-713a-49b6-9f35-930e0016d066\" (UID: \"028be7ae-713a-49b6-9f35-930e0016d066\") " Feb 03 07:27:12 crc kubenswrapper[4708]: I0203 07:27:12.884366 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/028be7ae-713a-49b6-9f35-930e0016d066-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "028be7ae-713a-49b6-9f35-930e0016d066" (UID: "028be7ae-713a-49b6-9f35-930e0016d066"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:12 crc kubenswrapper[4708]: I0203 07:27:12.891939 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/028be7ae-713a-49b6-9f35-930e0016d066-kube-api-access-h9csg" (OuterVolumeSpecName: "kube-api-access-h9csg") pod "028be7ae-713a-49b6-9f35-930e0016d066" (UID: "028be7ae-713a-49b6-9f35-930e0016d066"). InnerVolumeSpecName "kube-api-access-h9csg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:27:12 crc kubenswrapper[4708]: I0203 07:27:12.945348 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-rcz7k"] Feb 03 07:27:12 crc kubenswrapper[4708]: I0203 07:27:12.954587 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-rcz7k"] Feb 03 07:27:12 crc kubenswrapper[4708]: I0203 07:27:12.984929 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c944aa48-f235-469e-8513-106dab7e315a-operator-scripts\") pod \"c944aa48-f235-469e-8513-106dab7e315a\" (UID: \"c944aa48-f235-469e-8513-106dab7e315a\") " Feb 03 07:27:12 crc kubenswrapper[4708]: I0203 07:27:12.985003 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2-operator-scripts\") pod \"43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2\" (UID: \"43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2\") " Feb 03 07:27:12 crc kubenswrapper[4708]: I0203 07:27:12.985141 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kz4cq\" (UniqueName: \"kubernetes.io/projected/43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2-kube-api-access-kz4cq\") pod \"43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2\" (UID: \"43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2\") " Feb 03 07:27:12 crc kubenswrapper[4708]: I0203 07:27:12.985232 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfqn5\" (UniqueName: \"kubernetes.io/projected/c944aa48-f235-469e-8513-106dab7e315a-kube-api-access-kfqn5\") pod \"c944aa48-f235-469e-8513-106dab7e315a\" (UID: \"c944aa48-f235-469e-8513-106dab7e315a\") " Feb 03 07:27:12 crc kubenswrapper[4708]: I0203 07:27:12.985587 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h9csg\" (UniqueName: \"kubernetes.io/projected/028be7ae-713a-49b6-9f35-930e0016d066-kube-api-access-h9csg\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:12 crc kubenswrapper[4708]: I0203 07:27:12.985606 4708 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/028be7ae-713a-49b6-9f35-930e0016d066-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:12 crc kubenswrapper[4708]: I0203 07:27:12.985696 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c944aa48-f235-469e-8513-106dab7e315a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c944aa48-f235-469e-8513-106dab7e315a" (UID: "c944aa48-f235-469e-8513-106dab7e315a"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:12 crc kubenswrapper[4708]: I0203 07:27:12.986467 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2" (UID: "43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:12 crc kubenswrapper[4708]: I0203 07:27:12.988253 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c944aa48-f235-469e-8513-106dab7e315a-kube-api-access-kfqn5" (OuterVolumeSpecName: "kube-api-access-kfqn5") pod "c944aa48-f235-469e-8513-106dab7e315a" (UID: "c944aa48-f235-469e-8513-106dab7e315a"). InnerVolumeSpecName "kube-api-access-kfqn5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:27:12 crc kubenswrapper[4708]: I0203 07:27:12.989199 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2-kube-api-access-kz4cq" (OuterVolumeSpecName: "kube-api-access-kz4cq") pod "43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2" (UID: "43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2"). InnerVolumeSpecName "kube-api-access-kz4cq". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:27:13 crc kubenswrapper[4708]: I0203 07:27:13.086684 4708 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c944aa48-f235-469e-8513-106dab7e315a-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:13 crc kubenswrapper[4708]: I0203 07:27:13.086714 4708 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:13 crc kubenswrapper[4708]: I0203 07:27:13.086724 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kz4cq\" (UniqueName: \"kubernetes.io/projected/43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2-kube-api-access-kz4cq\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:13 crc kubenswrapper[4708]: I0203 07:27:13.086736 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfqn5\" (UniqueName: \"kubernetes.io/projected/c944aa48-f235-469e-8513-106dab7e315a-kube-api-access-kfqn5\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:13 crc kubenswrapper[4708]: I0203 07:27:13.284951 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-vsnx7" event={"ID":"c944aa48-f235-469e-8513-106dab7e315a","Type":"ContainerDied","Data":"f8c3270420307159a38e60516f4414852b444dd6b62a2b88dd76d20440c9bb62"} Feb 03 07:27:13 crc kubenswrapper[4708]: I0203 07:27:13.285284 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f8c3270420307159a38e60516f4414852b444dd6b62a2b88dd76d20440c9bb62" Feb 03 07:27:13 crc kubenswrapper[4708]: I0203 07:27:13.285007 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-vsnx7" Feb 03 07:27:13 crc kubenswrapper[4708]: I0203 07:27:13.286930 4708 generic.go:334] "Generic (PLEG): container finished" podID="4d163e5d-5089-42c1-b1d5-12960d6da873" containerID="ad977a123df838fec12ca9019254b0cc66a69bf9a2abf58c4a0ddb3bc6a611f7" exitCode=0 Feb 03 07:27:13 crc kubenswrapper[4708]: I0203 07:27:13.286976 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-07b1-account-create-update-xsnff" event={"ID":"4d163e5d-5089-42c1-b1d5-12960d6da873","Type":"ContainerDied","Data":"ad977a123df838fec12ca9019254b0cc66a69bf9a2abf58c4a0ddb3bc6a611f7"} Feb 03 07:27:13 crc kubenswrapper[4708]: I0203 07:27:13.289753 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-fjkqx" Feb 03 07:27:13 crc kubenswrapper[4708]: I0203 07:27:13.289767 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-fjkqx" event={"ID":"43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2","Type":"ContainerDied","Data":"cfbfab33c2a357b80e6ef389f7f25575f2dce6df3b653ce89303ee8450ba9dba"} Feb 03 07:27:13 crc kubenswrapper[4708]: I0203 07:27:13.289930 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cfbfab33c2a357b80e6ef389f7f25575f2dce6df3b653ce89303ee8450ba9dba" Feb 03 07:27:13 crc kubenswrapper[4708]: I0203 07:27:13.293302 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5956-account-create-update-tvxq4" event={"ID":"028be7ae-713a-49b6-9f35-930e0016d066","Type":"ContainerDied","Data":"b244d6b1735a45ece62ae62310a40bc1e8264225ba64bf052189d373127b0193"} Feb 03 07:27:13 crc kubenswrapper[4708]: I0203 07:27:13.293325 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b244d6b1735a45ece62ae62310a40bc1e8264225ba64bf052189d373127b0193" Feb 03 07:27:13 crc kubenswrapper[4708]: I0203 07:27:13.293363 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-5956-account-create-update-tvxq4" Feb 03 07:27:14 crc kubenswrapper[4708]: I0203 07:27:14.108287 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5606ed85-307e-4896-b509-f4fee8359589" path="/var/lib/kubelet/pods/5606ed85-307e-4896-b509-f4fee8359589/volumes" Feb 03 07:27:14 crc kubenswrapper[4708]: I0203 07:27:14.406342 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a0593ff7-ba15-46be-8879-70dc42f3beb2-etc-swift\") pod \"swift-storage-0\" (UID: \"a0593ff7-ba15-46be-8879-70dc42f3beb2\") " pod="openstack/swift-storage-0" Feb 03 07:27:14 crc kubenswrapper[4708]: I0203 07:27:14.423145 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a0593ff7-ba15-46be-8879-70dc42f3beb2-etc-swift\") pod \"swift-storage-0\" (UID: \"a0593ff7-ba15-46be-8879-70dc42f3beb2\") " pod="openstack/swift-storage-0" Feb 03 07:27:14 crc kubenswrapper[4708]: I0203 07:27:14.570898 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-pb4xp" podUID="3b5a2d58-5ebb-4838-a798-bc280fe99951" containerName="ovn-controller" probeResult="failure" output=< Feb 03 07:27:14 crc kubenswrapper[4708]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Feb 03 07:27:14 crc kubenswrapper[4708]: > Feb 03 07:27:14 crc kubenswrapper[4708]: I0203 07:27:14.667700 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-07b1-account-create-update-xsnff" Feb 03 07:27:14 crc kubenswrapper[4708]: I0203 07:27:14.677082 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Feb 03 07:27:14 crc kubenswrapper[4708]: I0203 07:27:14.814540 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9tkd4\" (UniqueName: \"kubernetes.io/projected/4d163e5d-5089-42c1-b1d5-12960d6da873-kube-api-access-9tkd4\") pod \"4d163e5d-5089-42c1-b1d5-12960d6da873\" (UID: \"4d163e5d-5089-42c1-b1d5-12960d6da873\") " Feb 03 07:27:14 crc kubenswrapper[4708]: I0203 07:27:14.814693 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4d163e5d-5089-42c1-b1d5-12960d6da873-operator-scripts\") pod \"4d163e5d-5089-42c1-b1d5-12960d6da873\" (UID: \"4d163e5d-5089-42c1-b1d5-12960d6da873\") " Feb 03 07:27:14 crc kubenswrapper[4708]: I0203 07:27:14.815556 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d163e5d-5089-42c1-b1d5-12960d6da873-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4d163e5d-5089-42c1-b1d5-12960d6da873" (UID: "4d163e5d-5089-42c1-b1d5-12960d6da873"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:14 crc kubenswrapper[4708]: I0203 07:27:14.819621 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d163e5d-5089-42c1-b1d5-12960d6da873-kube-api-access-9tkd4" (OuterVolumeSpecName: "kube-api-access-9tkd4") pod "4d163e5d-5089-42c1-b1d5-12960d6da873" (UID: "4d163e5d-5089-42c1-b1d5-12960d6da873"). InnerVolumeSpecName "kube-api-access-9tkd4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:27:14 crc kubenswrapper[4708]: I0203 07:27:14.916993 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9tkd4\" (UniqueName: \"kubernetes.io/projected/4d163e5d-5089-42c1-b1d5-12960d6da873-kube-api-access-9tkd4\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:14 crc kubenswrapper[4708]: I0203 07:27:14.918803 4708 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4d163e5d-5089-42c1-b1d5-12960d6da873-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:15 crc kubenswrapper[4708]: I0203 07:27:15.249436 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Feb 03 07:27:15 crc kubenswrapper[4708]: I0203 07:27:15.310707 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-07b1-account-create-update-xsnff" event={"ID":"4d163e5d-5089-42c1-b1d5-12960d6da873","Type":"ContainerDied","Data":"ef4a3446e7aed57ee1ee3a52484424faf66f3e43b1c425d55cd94ae2f2184c3b"} Feb 03 07:27:15 crc kubenswrapper[4708]: I0203 07:27:15.310757 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ef4a3446e7aed57ee1ee3a52484424faf66f3e43b1c425d55cd94ae2f2184c3b" Feb 03 07:27:15 crc kubenswrapper[4708]: I0203 07:27:15.310817 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-07b1-account-create-update-xsnff" Feb 03 07:27:15 crc kubenswrapper[4708]: I0203 07:27:15.311907 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a0593ff7-ba15-46be-8879-70dc42f3beb2","Type":"ContainerStarted","Data":"9c8098dda628f1c1e7de960d83f705b3f35e42ea370648d21cc2005e6c62a78f"} Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.141999 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.340349 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a0593ff7-ba15-46be-8879-70dc42f3beb2","Type":"ContainerStarted","Data":"b86a482340e27a72c7aa1938d0bc13c758378a7adedb088fd9892ad60226a423"} Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.514003 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.528843 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-kk829"] Feb 03 07:27:16 crc kubenswrapper[4708]: E0203 07:27:16.529243 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c944aa48-f235-469e-8513-106dab7e315a" containerName="mariadb-database-create" Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.529262 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="c944aa48-f235-469e-8513-106dab7e315a" containerName="mariadb-database-create" Feb 03 07:27:16 crc kubenswrapper[4708]: E0203 07:27:16.529299 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2" containerName="mariadb-database-create" Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.529307 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2" containerName="mariadb-database-create" Feb 03 07:27:16 crc kubenswrapper[4708]: E0203 07:27:16.529318 4708 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="028be7ae-713a-49b6-9f35-930e0016d066" containerName="mariadb-account-create-update" Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.529327 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="028be7ae-713a-49b6-9f35-930e0016d066" containerName="mariadb-account-create-update" Feb 03 07:27:16 crc kubenswrapper[4708]: E0203 07:27:16.529341 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d163e5d-5089-42c1-b1d5-12960d6da873" containerName="mariadb-account-create-update" Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.529348 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d163e5d-5089-42c1-b1d5-12960d6da873" containerName="mariadb-account-create-update" Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.531146 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d163e5d-5089-42c1-b1d5-12960d6da873" containerName="mariadb-account-create-update" Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.531179 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="028be7ae-713a-49b6-9f35-930e0016d066" containerName="mariadb-account-create-update" Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.531192 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2" containerName="mariadb-database-create" Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.531206 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="c944aa48-f235-469e-8513-106dab7e315a" containerName="mariadb-database-create" Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.531891 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-kk829" Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.541471 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-kk829"] Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.648843 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/063192c5-ad89-4dde-bad6-af78e8bf8459-operator-scripts\") pod \"cinder-db-create-kk829\" (UID: \"063192c5-ad89-4dde-bad6-af78e8bf8459\") " pod="openstack/cinder-db-create-kk829" Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.648923 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbs4s\" (UniqueName: \"kubernetes.io/projected/063192c5-ad89-4dde-bad6-af78e8bf8459-kube-api-access-lbs4s\") pod \"cinder-db-create-kk829\" (UID: \"063192c5-ad89-4dde-bad6-af78e8bf8459\") " pod="openstack/cinder-db-create-kk829" Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.710054 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-0d95-account-create-update-jbjg4"] Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.711057 4708 util.go:30] "No sandbox for pod can be found. 
Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.711057 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-0d95-account-create-update-jbjg4"
Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.715193 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret"
Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.750688 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/063192c5-ad89-4dde-bad6-af78e8bf8459-operator-scripts\") pod \"cinder-db-create-kk829\" (UID: \"063192c5-ad89-4dde-bad6-af78e8bf8459\") " pod="openstack/cinder-db-create-kk829"
Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.750734 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbs4s\" (UniqueName: \"kubernetes.io/projected/063192c5-ad89-4dde-bad6-af78e8bf8459-kube-api-access-lbs4s\") pod \"cinder-db-create-kk829\" (UID: \"063192c5-ad89-4dde-bad6-af78e8bf8459\") " pod="openstack/cinder-db-create-kk829"
Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.751638 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/063192c5-ad89-4dde-bad6-af78e8bf8459-operator-scripts\") pod \"cinder-db-create-kk829\" (UID: \"063192c5-ad89-4dde-bad6-af78e8bf8459\") " pod="openstack/cinder-db-create-kk829"
Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.778739 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-0d95-account-create-update-jbjg4"]
Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.791614 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbs4s\" (UniqueName: \"kubernetes.io/projected/063192c5-ad89-4dde-bad6-af78e8bf8459-kube-api-access-lbs4s\") pod \"cinder-db-create-kk829\" (UID: \"063192c5-ad89-4dde-bad6-af78e8bf8459\") " pod="openstack/cinder-db-create-kk829"
Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.796250 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-5knks"]
Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.797161 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-5knks"
Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.820920 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-5knks"]
Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.854181 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbcc7\" (UniqueName: \"kubernetes.io/projected/0e8f947f-87bc-4215-8c50-2409fb2b274f-kube-api-access-lbcc7\") pod \"cinder-0d95-account-create-update-jbjg4\" (UID: \"0e8f947f-87bc-4215-8c50-2409fb2b274f\") " pod="openstack/cinder-0d95-account-create-update-jbjg4"
Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.854255 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0e8f947f-87bc-4215-8c50-2409fb2b274f-operator-scripts\") pod \"cinder-0d95-account-create-update-jbjg4\" (UID: \"0e8f947f-87bc-4215-8c50-2409fb2b274f\") " pod="openstack/cinder-0d95-account-create-update-jbjg4"
Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.855982 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-kk829"
Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.891765 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-sgtsg"]
Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.893835 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-sgtsg"
Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.906875 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.907063 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.907110 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.907257 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-x5pvc"
Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.911234 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-45fjr"]
Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.912577 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-45fjr"
Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.921114 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-sgtsg"]
Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.940084 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-45fjr"]
Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.962520 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhw5c\" (UniqueName: \"kubernetes.io/projected/7be3ad58-dba8-419b-85e7-52ca0ae0fe3a-kube-api-access-lhw5c\") pod \"barbican-db-create-5knks\" (UID: \"7be3ad58-dba8-419b-85e7-52ca0ae0fe3a\") " pod="openstack/barbican-db-create-5knks"
Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.962905 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbcc7\" (UniqueName: \"kubernetes.io/projected/0e8f947f-87bc-4215-8c50-2409fb2b274f-kube-api-access-lbcc7\") pod \"cinder-0d95-account-create-update-jbjg4\" (UID: \"0e8f947f-87bc-4215-8c50-2409fb2b274f\") " pod="openstack/cinder-0d95-account-create-update-jbjg4"
Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.963078 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7be3ad58-dba8-419b-85e7-52ca0ae0fe3a-operator-scripts\") pod \"barbican-db-create-5knks\" (UID: \"7be3ad58-dba8-419b-85e7-52ca0ae0fe3a\") " pod="openstack/barbican-db-create-5knks"
Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.963244 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0e8f947f-87bc-4215-8c50-2409fb2b274f-operator-scripts\") pod \"cinder-0d95-account-create-update-jbjg4\" (UID: \"0e8f947f-87bc-4215-8c50-2409fb2b274f\") " pod="openstack/cinder-0d95-account-create-update-jbjg4"
Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.965726 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0e8f947f-87bc-4215-8c50-2409fb2b274f-operator-scripts\") pod \"cinder-0d95-account-create-update-jbjg4\" (UID: \"0e8f947f-87bc-4215-8c50-2409fb2b274f\") " pod="openstack/cinder-0d95-account-create-update-jbjg4"
Feb 03 07:27:16 crc kubenswrapper[4708]: I0203 07:27:16.990788 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbcc7\" (UniqueName: \"kubernetes.io/projected/0e8f947f-87bc-4215-8c50-2409fb2b274f-kube-api-access-lbcc7\") pod \"cinder-0d95-account-create-update-jbjg4\" (UID: \"0e8f947f-87bc-4215-8c50-2409fb2b274f\") " pod="openstack/cinder-0d95-account-create-update-jbjg4"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.024319 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-dbf8-account-create-update-gxd7n"]
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.026357 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dbf8-account-create-update-gxd7n"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.029264 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.039669 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dbf8-account-create-update-gxd7n"]
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.047364 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-0d95-account-create-update-jbjg4"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.066081 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqkpp\" (UniqueName: \"kubernetes.io/projected/adc80637-973b-4bd0-b444-4d2d41e23b8b-kube-api-access-vqkpp\") pod \"keystone-db-sync-sgtsg\" (UID: \"adc80637-973b-4bd0-b444-4d2d41e23b8b\") " pod="openstack/keystone-db-sync-sgtsg"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.066144 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/adc80637-973b-4bd0-b444-4d2d41e23b8b-combined-ca-bundle\") pod \"keystone-db-sync-sgtsg\" (UID: \"adc80637-973b-4bd0-b444-4d2d41e23b8b\") " pod="openstack/keystone-db-sync-sgtsg"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.066178 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f83726be-8013-490c-92c3-f19b0a04c112-operator-scripts\") pod \"neutron-db-create-45fjr\" (UID: \"f83726be-8013-490c-92c3-f19b0a04c112\") " pod="openstack/neutron-db-create-45fjr"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.066198 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2k9h\" (UniqueName: \"kubernetes.io/projected/f83726be-8013-490c-92c3-f19b0a04c112-kube-api-access-c2k9h\") pod \"neutron-db-create-45fjr\" (UID: \"f83726be-8013-490c-92c3-f19b0a04c112\") " pod="openstack/neutron-db-create-45fjr"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.066233 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhw5c\" (UniqueName: \"kubernetes.io/projected/7be3ad58-dba8-419b-85e7-52ca0ae0fe3a-kube-api-access-lhw5c\") pod \"barbican-db-create-5knks\" (UID: \"7be3ad58-dba8-419b-85e7-52ca0ae0fe3a\") " pod="openstack/barbican-db-create-5knks"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.066277 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/adc80637-973b-4bd0-b444-4d2d41e23b8b-config-data\") pod \"keystone-db-sync-sgtsg\" (UID: \"adc80637-973b-4bd0-b444-4d2d41e23b8b\") " pod="openstack/keystone-db-sync-sgtsg"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.066314 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7be3ad58-dba8-419b-85e7-52ca0ae0fe3a-operator-scripts\") pod \"barbican-db-create-5knks\" (UID: \"7be3ad58-dba8-419b-85e7-52ca0ae0fe3a\") " pod="openstack/barbican-db-create-5knks"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.067173 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7be3ad58-dba8-419b-85e7-52ca0ae0fe3a-operator-scripts\") pod \"barbican-db-create-5knks\" (UID: \"7be3ad58-dba8-419b-85e7-52ca0ae0fe3a\") " pod="openstack/barbican-db-create-5knks"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.098045 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhw5c\" (UniqueName: \"kubernetes.io/projected/7be3ad58-dba8-419b-85e7-52ca0ae0fe3a-kube-api-access-lhw5c\") pod \"barbican-db-create-5knks\" (UID: \"7be3ad58-dba8-419b-85e7-52ca0ae0fe3a\") " pod="openstack/barbican-db-create-5knks"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.111591 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-a9e3-account-create-update-hf4t4"]
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.114093 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-a9e3-account-create-update-hf4t4"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.119350 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.120531 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-a9e3-account-create-update-hf4t4"]
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.128365 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-5knks"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.167484 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/01fcd20e-f6ef-4ecc-b29a-98b053efae92-operator-scripts\") pod \"neutron-dbf8-account-create-update-gxd7n\" (UID: \"01fcd20e-f6ef-4ecc-b29a-98b053efae92\") " pod="openstack/neutron-dbf8-account-create-update-gxd7n"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.167589 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqkpp\" (UniqueName: \"kubernetes.io/projected/adc80637-973b-4bd0-b444-4d2d41e23b8b-kube-api-access-vqkpp\") pod \"keystone-db-sync-sgtsg\" (UID: \"adc80637-973b-4bd0-b444-4d2d41e23b8b\") " pod="openstack/keystone-db-sync-sgtsg"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.167634 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/adc80637-973b-4bd0-b444-4d2d41e23b8b-combined-ca-bundle\") pod \"keystone-db-sync-sgtsg\" (UID: \"adc80637-973b-4bd0-b444-4d2d41e23b8b\") " pod="openstack/keystone-db-sync-sgtsg"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.167666 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f83726be-8013-490c-92c3-f19b0a04c112-operator-scripts\") pod \"neutron-db-create-45fjr\" (UID: \"f83726be-8013-490c-92c3-f19b0a04c112\") " pod="openstack/neutron-db-create-45fjr"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.167693 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2k9h\" (UniqueName: \"kubernetes.io/projected/f83726be-8013-490c-92c3-f19b0a04c112-kube-api-access-c2k9h\") pod \"neutron-db-create-45fjr\" (UID: \"f83726be-8013-490c-92c3-f19b0a04c112\") " pod="openstack/neutron-db-create-45fjr"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.167755 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/adc80637-973b-4bd0-b444-4d2d41e23b8b-config-data\") pod \"keystone-db-sync-sgtsg\" (UID: \"adc80637-973b-4bd0-b444-4d2d41e23b8b\") " pod="openstack/keystone-db-sync-sgtsg"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.167788 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q48kq\" (UniqueName: \"kubernetes.io/projected/01fcd20e-f6ef-4ecc-b29a-98b053efae92-kube-api-access-q48kq\") pod \"neutron-dbf8-account-create-update-gxd7n\" (UID: \"01fcd20e-f6ef-4ecc-b29a-98b053efae92\") " pod="openstack/neutron-dbf8-account-create-update-gxd7n"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.175408 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/adc80637-973b-4bd0-b444-4d2d41e23b8b-config-data\") pod \"keystone-db-sync-sgtsg\" (UID: \"adc80637-973b-4bd0-b444-4d2d41e23b8b\") " pod="openstack/keystone-db-sync-sgtsg"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.183760 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/adc80637-973b-4bd0-b444-4d2d41e23b8b-combined-ca-bundle\") pod \"keystone-db-sync-sgtsg\" (UID: \"adc80637-973b-4bd0-b444-4d2d41e23b8b\") " pod="openstack/keystone-db-sync-sgtsg"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.184464 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f83726be-8013-490c-92c3-f19b0a04c112-operator-scripts\") pod \"neutron-db-create-45fjr\" (UID: \"f83726be-8013-490c-92c3-f19b0a04c112\") " pod="openstack/neutron-db-create-45fjr"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.186476 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqkpp\" (UniqueName: \"kubernetes.io/projected/adc80637-973b-4bd0-b444-4d2d41e23b8b-kube-api-access-vqkpp\") pod \"keystone-db-sync-sgtsg\" (UID: \"adc80637-973b-4bd0-b444-4d2d41e23b8b\") " pod="openstack/keystone-db-sync-sgtsg"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.192977 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2k9h\" (UniqueName: \"kubernetes.io/projected/f83726be-8013-490c-92c3-f19b0a04c112-kube-api-access-c2k9h\") pod \"neutron-db-create-45fjr\" (UID: \"f83726be-8013-490c-92c3-f19b0a04c112\") " pod="openstack/neutron-db-create-45fjr"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.237809 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-sgtsg"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.248170 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-45fjr"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.271790 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q48kq\" (UniqueName: \"kubernetes.io/projected/01fcd20e-f6ef-4ecc-b29a-98b053efae92-kube-api-access-q48kq\") pod \"neutron-dbf8-account-create-update-gxd7n\" (UID: \"01fcd20e-f6ef-4ecc-b29a-98b053efae92\") " pod="openstack/neutron-dbf8-account-create-update-gxd7n"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.271867 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fb6rr\" (UniqueName: \"kubernetes.io/projected/26e5e627-4533-4800-bd50-826271c5dbef-kube-api-access-fb6rr\") pod \"barbican-a9e3-account-create-update-hf4t4\" (UID: \"26e5e627-4533-4800-bd50-826271c5dbef\") " pod="openstack/barbican-a9e3-account-create-update-hf4t4"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.271900 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/01fcd20e-f6ef-4ecc-b29a-98b053efae92-operator-scripts\") pod \"neutron-dbf8-account-create-update-gxd7n\" (UID: \"01fcd20e-f6ef-4ecc-b29a-98b053efae92\") " pod="openstack/neutron-dbf8-account-create-update-gxd7n"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.272021 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26e5e627-4533-4800-bd50-826271c5dbef-operator-scripts\") pod \"barbican-a9e3-account-create-update-hf4t4\" (UID: \"26e5e627-4533-4800-bd50-826271c5dbef\") " pod="openstack/barbican-a9e3-account-create-update-hf4t4"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.272688 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/01fcd20e-f6ef-4ecc-b29a-98b053efae92-operator-scripts\") pod \"neutron-dbf8-account-create-update-gxd7n\" (UID: \"01fcd20e-f6ef-4ecc-b29a-98b053efae92\") " pod="openstack/neutron-dbf8-account-create-update-gxd7n"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.293374 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q48kq\" (UniqueName: \"kubernetes.io/projected/01fcd20e-f6ef-4ecc-b29a-98b053efae92-kube-api-access-q48kq\") pod \"neutron-dbf8-account-create-update-gxd7n\" (UID: \"01fcd20e-f6ef-4ecc-b29a-98b053efae92\") " pod="openstack/neutron-dbf8-account-create-update-gxd7n"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.351948 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dbf8-account-create-update-gxd7n"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.366151 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a0593ff7-ba15-46be-8879-70dc42f3beb2","Type":"ContainerStarted","Data":"0addf01b0a12f4c9f5ae9409452d0592c614969dc3af61e25ba807c39c909ce4"}
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.373254 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fb6rr\" (UniqueName: \"kubernetes.io/projected/26e5e627-4533-4800-bd50-826271c5dbef-kube-api-access-fb6rr\") pod \"barbican-a9e3-account-create-update-hf4t4\" (UID: \"26e5e627-4533-4800-bd50-826271c5dbef\") " pod="openstack/barbican-a9e3-account-create-update-hf4t4"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.373334 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26e5e627-4533-4800-bd50-826271c5dbef-operator-scripts\") pod \"barbican-a9e3-account-create-update-hf4t4\" (UID: \"26e5e627-4533-4800-bd50-826271c5dbef\") " pod="openstack/barbican-a9e3-account-create-update-hf4t4"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.374027 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26e5e627-4533-4800-bd50-826271c5dbef-operator-scripts\") pod \"barbican-a9e3-account-create-update-hf4t4\" (UID: \"26e5e627-4533-4800-bd50-826271c5dbef\") " pod="openstack/barbican-a9e3-account-create-update-hf4t4"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.399731 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fb6rr\" (UniqueName: \"kubernetes.io/projected/26e5e627-4533-4800-bd50-826271c5dbef-kube-api-access-fb6rr\") pod \"barbican-a9e3-account-create-update-hf4t4\" (UID: \"26e5e627-4533-4800-bd50-826271c5dbef\") " pod="openstack/barbican-a9e3-account-create-update-hf4t4"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.463380 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-kk829"]
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.531402 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-5knks"]
Feb 03 07:27:17 crc kubenswrapper[4708]: W0203 07:27:17.548684 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7be3ad58_dba8_419b_85e7_52ca0ae0fe3a.slice/crio-0aa866eab1b34c79457fbeb6156b324bacd8d81116563189b0d45e555167f590 WatchSource:0}: Error finding container 0aa866eab1b34c79457fbeb6156b324bacd8d81116563189b0d45e555167f590: Status 404 returned error can't find the container with id 0aa866eab1b34c79457fbeb6156b324bacd8d81116563189b0d45e555167f590
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.616150 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-a9e3-account-create-update-hf4t4"
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.639113 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-0d95-account-create-update-jbjg4"]
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.702385 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-sgtsg"]
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.812776 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-45fjr"]
Feb 03 07:27:17 crc kubenswrapper[4708]: W0203 07:27:17.845299 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf83726be_8013_490c_92c3_f19b0a04c112.slice/crio-da6885540248479148ffab0ff48b466eafde47fda35fc56129e8f035dc226750 WatchSource:0}: Error finding container da6885540248479148ffab0ff48b466eafde47fda35fc56129e8f035dc226750: Status 404 returned error can't find the container with id da6885540248479148ffab0ff48b466eafde47fda35fc56129e8f035dc226750
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.940194 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-a9e3-account-create-update-hf4t4"]
Feb 03 07:27:17 crc kubenswrapper[4708]: W0203 07:27:17.953571 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod26e5e627_4533_4800_bd50_826271c5dbef.slice/crio-bfc8bf756d8cba3aec537974e66a96862202dd5beeda2d9dc7571aaf373cae9c WatchSource:0}: Error finding container bfc8bf756d8cba3aec537974e66a96862202dd5beeda2d9dc7571aaf373cae9c: Status 404 returned error can't find the container with id bfc8bf756d8cba3aec537974e66a96862202dd5beeda2d9dc7571aaf373cae9c
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.975531 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dbf8-account-create-update-gxd7n"]
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.985446 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-g8kn6"]
Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.986406 4708 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/root-account-create-update-g8kn6" Feb 03 07:27:17 crc kubenswrapper[4708]: I0203 07:27:17.992877 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-mariadb-root-db-secret" Feb 03 07:27:18 crc kubenswrapper[4708]: I0203 07:27:18.000927 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-g8kn6"] Feb 03 07:27:18 crc kubenswrapper[4708]: I0203 07:27:18.087389 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/df850a16-e0b1-4d87-913a-a30a7f2365be-operator-scripts\") pod \"root-account-create-update-g8kn6\" (UID: \"df850a16-e0b1-4d87-913a-a30a7f2365be\") " pod="openstack/root-account-create-update-g8kn6" Feb 03 07:27:18 crc kubenswrapper[4708]: I0203 07:27:18.087512 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9t7tg\" (UniqueName: \"kubernetes.io/projected/df850a16-e0b1-4d87-913a-a30a7f2365be-kube-api-access-9t7tg\") pod \"root-account-create-update-g8kn6\" (UID: \"df850a16-e0b1-4d87-913a-a30a7f2365be\") " pod="openstack/root-account-create-update-g8kn6" Feb 03 07:27:18 crc kubenswrapper[4708]: I0203 07:27:18.189393 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/df850a16-e0b1-4d87-913a-a30a7f2365be-operator-scripts\") pod \"root-account-create-update-g8kn6\" (UID: \"df850a16-e0b1-4d87-913a-a30a7f2365be\") " pod="openstack/root-account-create-update-g8kn6" Feb 03 07:27:18 crc kubenswrapper[4708]: I0203 07:27:18.189830 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9t7tg\" (UniqueName: \"kubernetes.io/projected/df850a16-e0b1-4d87-913a-a30a7f2365be-kube-api-access-9t7tg\") pod \"root-account-create-update-g8kn6\" (UID: \"df850a16-e0b1-4d87-913a-a30a7f2365be\") " pod="openstack/root-account-create-update-g8kn6" Feb 03 07:27:18 crc kubenswrapper[4708]: I0203 07:27:18.190349 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/df850a16-e0b1-4d87-913a-a30a7f2365be-operator-scripts\") pod \"root-account-create-update-g8kn6\" (UID: \"df850a16-e0b1-4d87-913a-a30a7f2365be\") " pod="openstack/root-account-create-update-g8kn6" Feb 03 07:27:18 crc kubenswrapper[4708]: I0203 07:27:18.213559 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9t7tg\" (UniqueName: \"kubernetes.io/projected/df850a16-e0b1-4d87-913a-a30a7f2365be-kube-api-access-9t7tg\") pod \"root-account-create-update-g8kn6\" (UID: \"df850a16-e0b1-4d87-913a-a30a7f2365be\") " pod="openstack/root-account-create-update-g8kn6" Feb 03 07:27:18 crc kubenswrapper[4708]: I0203 07:27:18.303851 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-g8kn6" Feb 03 07:27:18 crc kubenswrapper[4708]: I0203 07:27:18.381974 4708 generic.go:334] "Generic (PLEG): container finished" podID="7be3ad58-dba8-419b-85e7-52ca0ae0fe3a" containerID="119c7c1f02fcb1f981f75b1abd410e597d9e6a7350b703293d44655b690fbd2d" exitCode=0 Feb 03 07:27:18 crc kubenswrapper[4708]: I0203 07:27:18.382098 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-5knks" event={"ID":"7be3ad58-dba8-419b-85e7-52ca0ae0fe3a","Type":"ContainerDied","Data":"119c7c1f02fcb1f981f75b1abd410e597d9e6a7350b703293d44655b690fbd2d"} Feb 03 07:27:18 crc kubenswrapper[4708]: I0203 07:27:18.382155 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-5knks" event={"ID":"7be3ad58-dba8-419b-85e7-52ca0ae0fe3a","Type":"ContainerStarted","Data":"0aa866eab1b34c79457fbeb6156b324bacd8d81116563189b0d45e555167f590"} Feb 03 07:27:18 crc kubenswrapper[4708]: I0203 07:27:18.386428 4708 generic.go:334] "Generic (PLEG): container finished" podID="063192c5-ad89-4dde-bad6-af78e8bf8459" containerID="8a23b5de7c532ce6a09b7cc55992a91a57b4f43569b5894c9fe647c58f23d7be" exitCode=0 Feb 03 07:27:18 crc kubenswrapper[4708]: I0203 07:27:18.386516 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-kk829" event={"ID":"063192c5-ad89-4dde-bad6-af78e8bf8459","Type":"ContainerDied","Data":"8a23b5de7c532ce6a09b7cc55992a91a57b4f43569b5894c9fe647c58f23d7be"} Feb 03 07:27:18 crc kubenswrapper[4708]: I0203 07:27:18.386540 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-kk829" event={"ID":"063192c5-ad89-4dde-bad6-af78e8bf8459","Type":"ContainerStarted","Data":"9a4da016a62dbdc559c2ec02c48e21a4da63a8a9f69ae4f252099a319693b69e"} Feb 03 07:27:18 crc kubenswrapper[4708]: I0203 07:27:18.393433 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-a9e3-account-create-update-hf4t4" event={"ID":"26e5e627-4533-4800-bd50-826271c5dbef","Type":"ContainerStarted","Data":"bfc8bf756d8cba3aec537974e66a96862202dd5beeda2d9dc7571aaf373cae9c"} Feb 03 07:27:18 crc kubenswrapper[4708]: I0203 07:27:18.395862 4708 generic.go:334] "Generic (PLEG): container finished" podID="f83726be-8013-490c-92c3-f19b0a04c112" containerID="68a58edb6f1952eabb4ec048fa9ded7a670e63db09807e91c7899377931c953c" exitCode=0 Feb 03 07:27:18 crc kubenswrapper[4708]: I0203 07:27:18.395903 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-45fjr" event={"ID":"f83726be-8013-490c-92c3-f19b0a04c112","Type":"ContainerDied","Data":"68a58edb6f1952eabb4ec048fa9ded7a670e63db09807e91c7899377931c953c"} Feb 03 07:27:18 crc kubenswrapper[4708]: I0203 07:27:18.395919 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-45fjr" event={"ID":"f83726be-8013-490c-92c3-f19b0a04c112","Type":"ContainerStarted","Data":"da6885540248479148ffab0ff48b466eafde47fda35fc56129e8f035dc226750"} Feb 03 07:27:18 crc kubenswrapper[4708]: I0203 07:27:18.397645 4708 generic.go:334] "Generic (PLEG): container finished" podID="0e8f947f-87bc-4215-8c50-2409fb2b274f" containerID="05e10050ba3569a7493fc63f945ecba165124bda1a45dd50c95665efe2c2bb19" exitCode=0 Feb 03 07:27:18 crc kubenswrapper[4708]: I0203 07:27:18.397689 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-0d95-account-create-update-jbjg4" 
event={"ID":"0e8f947f-87bc-4215-8c50-2409fb2b274f","Type":"ContainerDied","Data":"05e10050ba3569a7493fc63f945ecba165124bda1a45dd50c95665efe2c2bb19"} Feb 03 07:27:18 crc kubenswrapper[4708]: I0203 07:27:18.397704 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-0d95-account-create-update-jbjg4" event={"ID":"0e8f947f-87bc-4215-8c50-2409fb2b274f","Type":"ContainerStarted","Data":"93c61127a607a63f2c40342713a244a0fe6a4327732dd028c34fb6c0208ffa98"} Feb 03 07:27:18 crc kubenswrapper[4708]: I0203 07:27:18.403336 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dbf8-account-create-update-gxd7n" event={"ID":"01fcd20e-f6ef-4ecc-b29a-98b053efae92","Type":"ContainerStarted","Data":"c48383f06e4444b9a4175f9d56cecb98afe823c24ec1ac84af258937d02b8e8c"} Feb 03 07:27:18 crc kubenswrapper[4708]: I0203 07:27:18.404934 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-sgtsg" event={"ID":"adc80637-973b-4bd0-b444-4d2d41e23b8b","Type":"ContainerStarted","Data":"0e1f121a8862b9efb196f8cc200b88b9afc88572b0302dd8bc79d94ead19c344"} Feb 03 07:27:18 crc kubenswrapper[4708]: I0203 07:27:18.407085 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a0593ff7-ba15-46be-8879-70dc42f3beb2","Type":"ContainerStarted","Data":"0c9efeb0aa897d22196e665ab7ab3908ab641ed6bdf2e48b2eb6825b89880a17"} Feb 03 07:27:18 crc kubenswrapper[4708]: I0203 07:27:18.407111 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a0593ff7-ba15-46be-8879-70dc42f3beb2","Type":"ContainerStarted","Data":"5a11b9fa55b094a60159149df387af324ce19a2d35d72e1992fec20c3a6de229"} Feb 03 07:27:18 crc kubenswrapper[4708]: I0203 07:27:18.852606 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-g8kn6"] Feb 03 07:27:19 crc kubenswrapper[4708]: I0203 07:27:19.423853 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-a9e3-account-create-update-hf4t4" event={"ID":"26e5e627-4533-4800-bd50-826271c5dbef","Type":"ContainerStarted","Data":"f50f4bfa6774b43edeab9cfda1955788a567fb833ab63946760f5f6a8937ada0"} Feb 03 07:27:19 crc kubenswrapper[4708]: I0203 07:27:19.426235 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dbf8-account-create-update-gxd7n" event={"ID":"01fcd20e-f6ef-4ecc-b29a-98b053efae92","Type":"ContainerStarted","Data":"5e3636632c55cf3a594c28ee75b7ad6ad332bcfd0943b701a3b8371981e85f2f"} Feb 03 07:27:19 crc kubenswrapper[4708]: I0203 07:27:19.444832 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-a9e3-account-create-update-hf4t4" podStartSLOduration=2.444815386 podStartE2EDuration="2.444815386s" podCreationTimestamp="2026-02-03 07:27:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:27:19.442764126 +0000 UTC m=+1018.424710933" watchObservedRunningTime="2026-02-03 07:27:19.444815386 +0000 UTC m=+1018.426762193" Feb 03 07:27:19 crc kubenswrapper[4708]: I0203 07:27:19.469661 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-dbf8-account-create-update-gxd7n" podStartSLOduration=3.469645354 podStartE2EDuration="3.469645354s" podCreationTimestamp="2026-02-03 07:27:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-02-03 07:27:19.464913588 +0000 UTC m=+1018.446860395" watchObservedRunningTime="2026-02-03 07:27:19.469645354 +0000 UTC m=+1018.451592161" Feb 03 07:27:19 crc kubenswrapper[4708]: I0203 07:27:19.584563 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-pb4xp" podUID="3b5a2d58-5ebb-4838-a798-bc280fe99951" containerName="ovn-controller" probeResult="failure" output=< Feb 03 07:27:19 crc kubenswrapper[4708]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Feb 03 07:27:19 crc kubenswrapper[4708]: > Feb 03 07:27:19 crc kubenswrapper[4708]: I0203 07:27:19.600051 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-48bcs" Feb 03 07:27:19 crc kubenswrapper[4708]: I0203 07:27:19.802988 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-pb4xp-config-kxzv9"] Feb 03 07:27:19 crc kubenswrapper[4708]: I0203 07:27:19.803908 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-pb4xp-config-kxzv9" Feb 03 07:27:19 crc kubenswrapper[4708]: I0203 07:27:19.813328 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-pb4xp-config-kxzv9"] Feb 03 07:27:19 crc kubenswrapper[4708]: I0203 07:27:19.818990 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Feb 03 07:27:19 crc kubenswrapper[4708]: I0203 07:27:19.915890 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q72v8\" (UniqueName: \"kubernetes.io/projected/efc4f37b-bfec-47a1-93be-28b47d107130-kube-api-access-q72v8\") pod \"ovn-controller-pb4xp-config-kxzv9\" (UID: \"efc4f37b-bfec-47a1-93be-28b47d107130\") " pod="openstack/ovn-controller-pb4xp-config-kxzv9" Feb 03 07:27:19 crc kubenswrapper[4708]: I0203 07:27:19.915957 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/efc4f37b-bfec-47a1-93be-28b47d107130-additional-scripts\") pod \"ovn-controller-pb4xp-config-kxzv9\" (UID: \"efc4f37b-bfec-47a1-93be-28b47d107130\") " pod="openstack/ovn-controller-pb4xp-config-kxzv9" Feb 03 07:27:19 crc kubenswrapper[4708]: I0203 07:27:19.915992 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/efc4f37b-bfec-47a1-93be-28b47d107130-scripts\") pod \"ovn-controller-pb4xp-config-kxzv9\" (UID: \"efc4f37b-bfec-47a1-93be-28b47d107130\") " pod="openstack/ovn-controller-pb4xp-config-kxzv9" Feb 03 07:27:19 crc kubenswrapper[4708]: I0203 07:27:19.916072 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/efc4f37b-bfec-47a1-93be-28b47d107130-var-run-ovn\") pod \"ovn-controller-pb4xp-config-kxzv9\" (UID: \"efc4f37b-bfec-47a1-93be-28b47d107130\") " pod="openstack/ovn-controller-pb4xp-config-kxzv9" Feb 03 07:27:19 crc kubenswrapper[4708]: I0203 07:27:19.916115 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/efc4f37b-bfec-47a1-93be-28b47d107130-var-log-ovn\") pod \"ovn-controller-pb4xp-config-kxzv9\" (UID: \"efc4f37b-bfec-47a1-93be-28b47d107130\") " 
pod="openstack/ovn-controller-pb4xp-config-kxzv9" Feb 03 07:27:19 crc kubenswrapper[4708]: I0203 07:27:19.916191 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/efc4f37b-bfec-47a1-93be-28b47d107130-var-run\") pod \"ovn-controller-pb4xp-config-kxzv9\" (UID: \"efc4f37b-bfec-47a1-93be-28b47d107130\") " pod="openstack/ovn-controller-pb4xp-config-kxzv9" Feb 03 07:27:20 crc kubenswrapper[4708]: I0203 07:27:20.030017 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/efc4f37b-bfec-47a1-93be-28b47d107130-var-run\") pod \"ovn-controller-pb4xp-config-kxzv9\" (UID: \"efc4f37b-bfec-47a1-93be-28b47d107130\") " pod="openstack/ovn-controller-pb4xp-config-kxzv9" Feb 03 07:27:20 crc kubenswrapper[4708]: I0203 07:27:20.030159 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q72v8\" (UniqueName: \"kubernetes.io/projected/efc4f37b-bfec-47a1-93be-28b47d107130-kube-api-access-q72v8\") pod \"ovn-controller-pb4xp-config-kxzv9\" (UID: \"efc4f37b-bfec-47a1-93be-28b47d107130\") " pod="openstack/ovn-controller-pb4xp-config-kxzv9" Feb 03 07:27:20 crc kubenswrapper[4708]: I0203 07:27:20.030251 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/efc4f37b-bfec-47a1-93be-28b47d107130-additional-scripts\") pod \"ovn-controller-pb4xp-config-kxzv9\" (UID: \"efc4f37b-bfec-47a1-93be-28b47d107130\") " pod="openstack/ovn-controller-pb4xp-config-kxzv9" Feb 03 07:27:20 crc kubenswrapper[4708]: I0203 07:27:20.030330 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/efc4f37b-bfec-47a1-93be-28b47d107130-scripts\") pod \"ovn-controller-pb4xp-config-kxzv9\" (UID: \"efc4f37b-bfec-47a1-93be-28b47d107130\") " pod="openstack/ovn-controller-pb4xp-config-kxzv9" Feb 03 07:27:20 crc kubenswrapper[4708]: I0203 07:27:20.030318 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/efc4f37b-bfec-47a1-93be-28b47d107130-var-run\") pod \"ovn-controller-pb4xp-config-kxzv9\" (UID: \"efc4f37b-bfec-47a1-93be-28b47d107130\") " pod="openstack/ovn-controller-pb4xp-config-kxzv9" Feb 03 07:27:20 crc kubenswrapper[4708]: I0203 07:27:20.030602 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/efc4f37b-bfec-47a1-93be-28b47d107130-var-run-ovn\") pod \"ovn-controller-pb4xp-config-kxzv9\" (UID: \"efc4f37b-bfec-47a1-93be-28b47d107130\") " pod="openstack/ovn-controller-pb4xp-config-kxzv9" Feb 03 07:27:20 crc kubenswrapper[4708]: I0203 07:27:20.030694 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/efc4f37b-bfec-47a1-93be-28b47d107130-var-log-ovn\") pod \"ovn-controller-pb4xp-config-kxzv9\" (UID: \"efc4f37b-bfec-47a1-93be-28b47d107130\") " pod="openstack/ovn-controller-pb4xp-config-kxzv9" Feb 03 07:27:20 crc kubenswrapper[4708]: I0203 07:27:20.030841 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/efc4f37b-bfec-47a1-93be-28b47d107130-var-log-ovn\") pod \"ovn-controller-pb4xp-config-kxzv9\" (UID: \"efc4f37b-bfec-47a1-93be-28b47d107130\") " 
pod="openstack/ovn-controller-pb4xp-config-kxzv9" Feb 03 07:27:20 crc kubenswrapper[4708]: I0203 07:27:20.032188 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/efc4f37b-bfec-47a1-93be-28b47d107130-scripts\") pod \"ovn-controller-pb4xp-config-kxzv9\" (UID: \"efc4f37b-bfec-47a1-93be-28b47d107130\") " pod="openstack/ovn-controller-pb4xp-config-kxzv9" Feb 03 07:27:20 crc kubenswrapper[4708]: I0203 07:27:20.032286 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/efc4f37b-bfec-47a1-93be-28b47d107130-var-run-ovn\") pod \"ovn-controller-pb4xp-config-kxzv9\" (UID: \"efc4f37b-bfec-47a1-93be-28b47d107130\") " pod="openstack/ovn-controller-pb4xp-config-kxzv9" Feb 03 07:27:20 crc kubenswrapper[4708]: I0203 07:27:20.032922 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/efc4f37b-bfec-47a1-93be-28b47d107130-additional-scripts\") pod \"ovn-controller-pb4xp-config-kxzv9\" (UID: \"efc4f37b-bfec-47a1-93be-28b47d107130\") " pod="openstack/ovn-controller-pb4xp-config-kxzv9" Feb 03 07:27:20 crc kubenswrapper[4708]: I0203 07:27:20.052218 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q72v8\" (UniqueName: \"kubernetes.io/projected/efc4f37b-bfec-47a1-93be-28b47d107130-kube-api-access-q72v8\") pod \"ovn-controller-pb4xp-config-kxzv9\" (UID: \"efc4f37b-bfec-47a1-93be-28b47d107130\") " pod="openstack/ovn-controller-pb4xp-config-kxzv9" Feb 03 07:27:20 crc kubenswrapper[4708]: I0203 07:27:20.142521 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-pb4xp-config-kxzv9" Feb 03 07:27:21 crc kubenswrapper[4708]: I0203 07:27:21.443273 4708 generic.go:334] "Generic (PLEG): container finished" podID="26e5e627-4533-4800-bd50-826271c5dbef" containerID="f50f4bfa6774b43edeab9cfda1955788a567fb833ab63946760f5f6a8937ada0" exitCode=0 Feb 03 07:27:21 crc kubenswrapper[4708]: I0203 07:27:21.443351 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-a9e3-account-create-update-hf4t4" event={"ID":"26e5e627-4533-4800-bd50-826271c5dbef","Type":"ContainerDied","Data":"f50f4bfa6774b43edeab9cfda1955788a567fb833ab63946760f5f6a8937ada0"} Feb 03 07:27:21 crc kubenswrapper[4708]: I0203 07:27:21.445536 4708 generic.go:334] "Generic (PLEG): container finished" podID="01fcd20e-f6ef-4ecc-b29a-98b053efae92" containerID="5e3636632c55cf3a594c28ee75b7ad6ad332bcfd0943b701a3b8371981e85f2f" exitCode=0 Feb 03 07:27:21 crc kubenswrapper[4708]: I0203 07:27:21.445579 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dbf8-account-create-update-gxd7n" event={"ID":"01fcd20e-f6ef-4ecc-b29a-98b053efae92","Type":"ContainerDied","Data":"5e3636632c55cf3a594c28ee75b7ad6ad332bcfd0943b701a3b8371981e85f2f"} Feb 03 07:27:23 crc kubenswrapper[4708]: I0203 07:27:23.833219 4708 patch_prober.go:28] interesting pod/machine-config-daemon-r94bn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:27:23 crc kubenswrapper[4708]: I0203 07:27:23.834839 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:27:24 crc kubenswrapper[4708]: I0203 07:27:24.574954 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-pb4xp" podUID="3b5a2d58-5ebb-4838-a798-bc280fe99951" containerName="ovn-controller" probeResult="failure" output=< Feb 03 07:27:24 crc kubenswrapper[4708]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Feb 03 07:27:24 crc kubenswrapper[4708]: > Feb 03 07:27:26 crc kubenswrapper[4708]: W0203 07:27:26.168997 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddf850a16_e0b1_4d87_913a_a30a7f2365be.slice/crio-7e52c78064adeb3043fbf0eca024d63f926347170f06a4d28cf461a5a8c1d893 WatchSource:0}: Error finding container 7e52c78064adeb3043fbf0eca024d63f926347170f06a4d28cf461a5a8c1d893: Status 404 returned error can't find the container with id 7e52c78064adeb3043fbf0eca024d63f926347170f06a4d28cf461a5a8c1d893 Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.193530 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-mariadb-root-db-secret" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.253621 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-5knks" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.266124 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-a9e3-account-create-update-hf4t4" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.280902 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dbf8-account-create-update-gxd7n" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.304438 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-0d95-account-create-update-jbjg4" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.309431 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-45fjr" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.326875 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-kk829" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.348133 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fb6rr\" (UniqueName: \"kubernetes.io/projected/26e5e627-4533-4800-bd50-826271c5dbef-kube-api-access-fb6rr\") pod \"26e5e627-4533-4800-bd50-826271c5dbef\" (UID: \"26e5e627-4533-4800-bd50-826271c5dbef\") " Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.348302 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7be3ad58-dba8-419b-85e7-52ca0ae0fe3a-operator-scripts\") pod \"7be3ad58-dba8-419b-85e7-52ca0ae0fe3a\" (UID: \"7be3ad58-dba8-419b-85e7-52ca0ae0fe3a\") " Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.348330 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26e5e627-4533-4800-bd50-826271c5dbef-operator-scripts\") pod \"26e5e627-4533-4800-bd50-826271c5dbef\" (UID: \"26e5e627-4533-4800-bd50-826271c5dbef\") " Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.348412 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lhw5c\" (UniqueName: \"kubernetes.io/projected/7be3ad58-dba8-419b-85e7-52ca0ae0fe3a-kube-api-access-lhw5c\") pod \"7be3ad58-dba8-419b-85e7-52ca0ae0fe3a\" (UID: \"7be3ad58-dba8-419b-85e7-52ca0ae0fe3a\") " Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.349748 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26e5e627-4533-4800-bd50-826271c5dbef-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "26e5e627-4533-4800-bd50-826271c5dbef" (UID: "26e5e627-4533-4800-bd50-826271c5dbef"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.350010 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7be3ad58-dba8-419b-85e7-52ca0ae0fe3a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7be3ad58-dba8-419b-85e7-52ca0ae0fe3a" (UID: "7be3ad58-dba8-419b-85e7-52ca0ae0fe3a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.357052 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26e5e627-4533-4800-bd50-826271c5dbef-kube-api-access-fb6rr" (OuterVolumeSpecName: "kube-api-access-fb6rr") pod "26e5e627-4533-4800-bd50-826271c5dbef" (UID: "26e5e627-4533-4800-bd50-826271c5dbef"). InnerVolumeSpecName "kube-api-access-fb6rr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.357644 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7be3ad58-dba8-419b-85e7-52ca0ae0fe3a-kube-api-access-lhw5c" (OuterVolumeSpecName: "kube-api-access-lhw5c") pod "7be3ad58-dba8-419b-85e7-52ca0ae0fe3a" (UID: "7be3ad58-dba8-419b-85e7-52ca0ae0fe3a"). InnerVolumeSpecName "kube-api-access-lhw5c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.450584 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lbs4s\" (UniqueName: \"kubernetes.io/projected/063192c5-ad89-4dde-bad6-af78e8bf8459-kube-api-access-lbs4s\") pod \"063192c5-ad89-4dde-bad6-af78e8bf8459\" (UID: \"063192c5-ad89-4dde-bad6-af78e8bf8459\") " Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.450695 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/01fcd20e-f6ef-4ecc-b29a-98b053efae92-operator-scripts\") pod \"01fcd20e-f6ef-4ecc-b29a-98b053efae92\" (UID: \"01fcd20e-f6ef-4ecc-b29a-98b053efae92\") " Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.450753 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lbcc7\" (UniqueName: \"kubernetes.io/projected/0e8f947f-87bc-4215-8c50-2409fb2b274f-kube-api-access-lbcc7\") pod \"0e8f947f-87bc-4215-8c50-2409fb2b274f\" (UID: \"0e8f947f-87bc-4215-8c50-2409fb2b274f\") " Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.450788 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q48kq\" (UniqueName: \"kubernetes.io/projected/01fcd20e-f6ef-4ecc-b29a-98b053efae92-kube-api-access-q48kq\") pod \"01fcd20e-f6ef-4ecc-b29a-98b053efae92\" (UID: \"01fcd20e-f6ef-4ecc-b29a-98b053efae92\") " Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.450915 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f83726be-8013-490c-92c3-f19b0a04c112-operator-scripts\") pod \"f83726be-8013-490c-92c3-f19b0a04c112\" (UID: \"f83726be-8013-490c-92c3-f19b0a04c112\") " Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.451062 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c2k9h\" (UniqueName: \"kubernetes.io/projected/f83726be-8013-490c-92c3-f19b0a04c112-kube-api-access-c2k9h\") pod \"f83726be-8013-490c-92c3-f19b0a04c112\" (UID: \"f83726be-8013-490c-92c3-f19b0a04c112\") " Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.451152 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/063192c5-ad89-4dde-bad6-af78e8bf8459-operator-scripts\") pod \"063192c5-ad89-4dde-bad6-af78e8bf8459\" (UID: \"063192c5-ad89-4dde-bad6-af78e8bf8459\") " Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.451201 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0e8f947f-87bc-4215-8c50-2409fb2b274f-operator-scripts\") pod \"0e8f947f-87bc-4215-8c50-2409fb2b274f\" (UID: \"0e8f947f-87bc-4215-8c50-2409fb2b274f\") " Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.451244 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01fcd20e-f6ef-4ecc-b29a-98b053efae92-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "01fcd20e-f6ef-4ecc-b29a-98b053efae92" (UID: "01fcd20e-f6ef-4ecc-b29a-98b053efae92"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.451816 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lhw5c\" (UniqueName: \"kubernetes.io/projected/7be3ad58-dba8-419b-85e7-52ca0ae0fe3a-kube-api-access-lhw5c\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.451845 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fb6rr\" (UniqueName: \"kubernetes.io/projected/26e5e627-4533-4800-bd50-826271c5dbef-kube-api-access-fb6rr\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.451856 4708 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/01fcd20e-f6ef-4ecc-b29a-98b053efae92-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.451866 4708 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7be3ad58-dba8-419b-85e7-52ca0ae0fe3a-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.451902 4708 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26e5e627-4533-4800-bd50-826271c5dbef-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.452342 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e8f947f-87bc-4215-8c50-2409fb2b274f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0e8f947f-87bc-4215-8c50-2409fb2b274f" (UID: "0e8f947f-87bc-4215-8c50-2409fb2b274f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.453181 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f83726be-8013-490c-92c3-f19b0a04c112-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f83726be-8013-490c-92c3-f19b0a04c112" (UID: "f83726be-8013-490c-92c3-f19b0a04c112"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.453737 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/063192c5-ad89-4dde-bad6-af78e8bf8459-kube-api-access-lbs4s" (OuterVolumeSpecName: "kube-api-access-lbs4s") pod "063192c5-ad89-4dde-bad6-af78e8bf8459" (UID: "063192c5-ad89-4dde-bad6-af78e8bf8459"). InnerVolumeSpecName "kube-api-access-lbs4s". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.454030 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/063192c5-ad89-4dde-bad6-af78e8bf8459-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "063192c5-ad89-4dde-bad6-af78e8bf8459" (UID: "063192c5-ad89-4dde-bad6-af78e8bf8459"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.455526 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01fcd20e-f6ef-4ecc-b29a-98b053efae92-kube-api-access-q48kq" (OuterVolumeSpecName: "kube-api-access-q48kq") pod "01fcd20e-f6ef-4ecc-b29a-98b053efae92" (UID: "01fcd20e-f6ef-4ecc-b29a-98b053efae92"). InnerVolumeSpecName "kube-api-access-q48kq". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.461652 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f83726be-8013-490c-92c3-f19b0a04c112-kube-api-access-c2k9h" (OuterVolumeSpecName: "kube-api-access-c2k9h") pod "f83726be-8013-490c-92c3-f19b0a04c112" (UID: "f83726be-8013-490c-92c3-f19b0a04c112"). InnerVolumeSpecName "kube-api-access-c2k9h". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.475330 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e8f947f-87bc-4215-8c50-2409fb2b274f-kube-api-access-lbcc7" (OuterVolumeSpecName: "kube-api-access-lbcc7") pod "0e8f947f-87bc-4215-8c50-2409fb2b274f" (UID: "0e8f947f-87bc-4215-8c50-2409fb2b274f"). InnerVolumeSpecName "kube-api-access-lbcc7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.479731 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-kk829" event={"ID":"063192c5-ad89-4dde-bad6-af78e8bf8459","Type":"ContainerDied","Data":"9a4da016a62dbdc559c2ec02c48e21a4da63a8a9f69ae4f252099a319693b69e"} Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.479768 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9a4da016a62dbdc559c2ec02c48e21a4da63a8a9f69ae4f252099a319693b69e" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.479831 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-kk829" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.486147 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-a9e3-account-create-update-hf4t4" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.486165 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-a9e3-account-create-update-hf4t4" event={"ID":"26e5e627-4533-4800-bd50-826271c5dbef","Type":"ContainerDied","Data":"bfc8bf756d8cba3aec537974e66a96862202dd5beeda2d9dc7571aaf373cae9c"} Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.486199 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bfc8bf756d8cba3aec537974e66a96862202dd5beeda2d9dc7571aaf373cae9c" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.487979 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-45fjr" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.487976 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-45fjr" event={"ID":"f83726be-8013-490c-92c3-f19b0a04c112","Type":"ContainerDied","Data":"da6885540248479148ffab0ff48b466eafde47fda35fc56129e8f035dc226750"} Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.488012 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="da6885540248479148ffab0ff48b466eafde47fda35fc56129e8f035dc226750" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.489863 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-0d95-account-create-update-jbjg4" event={"ID":"0e8f947f-87bc-4215-8c50-2409fb2b274f","Type":"ContainerDied","Data":"93c61127a607a63f2c40342713a244a0fe6a4327732dd028c34fb6c0208ffa98"} Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.489898 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="93c61127a607a63f2c40342713a244a0fe6a4327732dd028c34fb6c0208ffa98" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.489918 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-0d95-account-create-update-jbjg4" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.491637 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dbf8-account-create-update-gxd7n" event={"ID":"01fcd20e-f6ef-4ecc-b29a-98b053efae92","Type":"ContainerDied","Data":"c48383f06e4444b9a4175f9d56cecb98afe823c24ec1ac84af258937d02b8e8c"} Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.491663 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c48383f06e4444b9a4175f9d56cecb98afe823c24ec1ac84af258937d02b8e8c" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.491681 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dbf8-account-create-update-gxd7n" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.495902 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-5knks" event={"ID":"7be3ad58-dba8-419b-85e7-52ca0ae0fe3a","Type":"ContainerDied","Data":"0aa866eab1b34c79457fbeb6156b324bacd8d81116563189b0d45e555167f590"} Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.495925 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-5knks" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.495946 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0aa866eab1b34c79457fbeb6156b324bacd8d81116563189b0d45e555167f590" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.496984 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-g8kn6" event={"ID":"df850a16-e0b1-4d87-913a-a30a7f2365be","Type":"ContainerStarted","Data":"7e52c78064adeb3043fbf0eca024d63f926347170f06a4d28cf461a5a8c1d893"} Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.553276 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lbs4s\" (UniqueName: \"kubernetes.io/projected/063192c5-ad89-4dde-bad6-af78e8bf8459-kube-api-access-lbs4s\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.553298 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lbcc7\" (UniqueName: \"kubernetes.io/projected/0e8f947f-87bc-4215-8c50-2409fb2b274f-kube-api-access-lbcc7\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.553308 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q48kq\" (UniqueName: \"kubernetes.io/projected/01fcd20e-f6ef-4ecc-b29a-98b053efae92-kube-api-access-q48kq\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.553316 4708 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f83726be-8013-490c-92c3-f19b0a04c112-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.553325 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c2k9h\" (UniqueName: \"kubernetes.io/projected/f83726be-8013-490c-92c3-f19b0a04c112-kube-api-access-c2k9h\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.553333 4708 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/063192c5-ad89-4dde-bad6-af78e8bf8459-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:26 crc kubenswrapper[4708]: I0203 07:27:26.553358 4708 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0e8f947f-87bc-4215-8c50-2409fb2b274f-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:29 crc kubenswrapper[4708]: I0203 07:27:29.533186 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a0593ff7-ba15-46be-8879-70dc42f3beb2","Type":"ContainerStarted","Data":"a3e51579d1e27c46786e22752587ae527244704029cfc463b5f5933d8d8c92b4"} Feb 03 07:27:29 crc kubenswrapper[4708]: I0203 07:27:29.539510 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-g8kn6" event={"ID":"df850a16-e0b1-4d87-913a-a30a7f2365be","Type":"ContainerStarted","Data":"c635e016c7bce6032bf15498513c8a4b8b4614d153fdd74bacf749f8217b5492"} Feb 03 07:27:29 crc kubenswrapper[4708]: I0203 07:27:29.559417 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/root-account-create-update-g8kn6" podStartSLOduration=12.55939325 podStartE2EDuration="12.55939325s" podCreationTimestamp="2026-02-03 07:27:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:27:29.556782946 +0000 UTC m=+1028.538729773" watchObservedRunningTime="2026-02-03 07:27:29.55939325 +0000 UTC m=+1028.541340077" Feb 03 07:27:29 crc kubenswrapper[4708]: I0203 07:27:29.580820 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-pb4xp" podUID="3b5a2d58-5ebb-4838-a798-bc280fe99951" containerName="ovn-controller" probeResult="failure" output=< Feb 03 07:27:29 crc kubenswrapper[4708]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Feb 03 07:27:29 crc kubenswrapper[4708]: > Feb 03 07:27:29 crc kubenswrapper[4708]: W0203 07:27:29.665174 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podefc4f37b_bfec_47a1_93be_28b47d107130.slice/crio-dc6100d638235a582ccf73275adfe5387a5dbe27e84b712e64aff731b9eb028e WatchSource:0}: Error finding container dc6100d638235a582ccf73275adfe5387a5dbe27e84b712e64aff731b9eb028e: Status 404 returned error can't find the container with id dc6100d638235a582ccf73275adfe5387a5dbe27e84b712e64aff731b9eb028e Feb 03 07:27:29 crc kubenswrapper[4708]: I0203 07:27:29.667953 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-pb4xp-config-kxzv9"] Feb 03 07:27:30 crc kubenswrapper[4708]: I0203 07:27:30.569259 4708 generic.go:334] "Generic (PLEG): container finished" podID="efc4f37b-bfec-47a1-93be-28b47d107130" containerID="3d00c39fed00c0ab018d6f17b738c44a8d9ddec13c79629ada6f288f394739e1" exitCode=0 Feb 03 07:27:30 crc kubenswrapper[4708]: I0203 07:27:30.570166 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-pb4xp-config-kxzv9" event={"ID":"efc4f37b-bfec-47a1-93be-28b47d107130","Type":"ContainerDied","Data":"3d00c39fed00c0ab018d6f17b738c44a8d9ddec13c79629ada6f288f394739e1"} Feb 03 07:27:30 crc kubenswrapper[4708]: I0203 07:27:30.570199 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-pb4xp-config-kxzv9" event={"ID":"efc4f37b-bfec-47a1-93be-28b47d107130","Type":"ContainerStarted","Data":"dc6100d638235a582ccf73275adfe5387a5dbe27e84b712e64aff731b9eb028e"} Feb 03 07:27:30 crc kubenswrapper[4708]: I0203 07:27:30.573886 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-sgtsg" event={"ID":"adc80637-973b-4bd0-b444-4d2d41e23b8b","Type":"ContainerStarted","Data":"a4a7ee48486be6c0d8cb255566132c50e9692ed4d7ad09537dc3ba10be929495"} Feb 03 07:27:30 crc kubenswrapper[4708]: I0203 07:27:30.579606 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a0593ff7-ba15-46be-8879-70dc42f3beb2","Type":"ContainerStarted","Data":"bcbb4ab23a6572afb52f47644f9ed233367aa4a0f3b9e9ed57d836bb0213cb50"} Feb 03 07:27:30 crc kubenswrapper[4708]: I0203 07:27:30.579648 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a0593ff7-ba15-46be-8879-70dc42f3beb2","Type":"ContainerStarted","Data":"95438ff33dd1e10bfeed6384e9d44539a37d4a6ae00acb413042f6a8fbf26e70"} Feb 03 07:27:30 crc kubenswrapper[4708]: I0203 07:27:30.579663 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a0593ff7-ba15-46be-8879-70dc42f3beb2","Type":"ContainerStarted","Data":"98f7d6e9dc6af0fc946c87647dca60b6ae20f08aee1911f3d1b4a32e3fba8afc"} Feb 03 07:27:30 crc kubenswrapper[4708]: I0203 07:27:30.580984 4708 
generic.go:334] "Generic (PLEG): container finished" podID="df850a16-e0b1-4d87-913a-a30a7f2365be" containerID="c635e016c7bce6032bf15498513c8a4b8b4614d153fdd74bacf749f8217b5492" exitCode=0 Feb 03 07:27:30 crc kubenswrapper[4708]: I0203 07:27:30.581031 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-g8kn6" event={"ID":"df850a16-e0b1-4d87-913a-a30a7f2365be","Type":"ContainerDied","Data":"c635e016c7bce6032bf15498513c8a4b8b4614d153fdd74bacf749f8217b5492"} Feb 03 07:27:30 crc kubenswrapper[4708]: I0203 07:27:30.582240 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-t95nr" event={"ID":"6201bbf3-523c-4a64-9703-fb0adbc0955a","Type":"ContainerStarted","Data":"bc6a69b873ed3d6240e916a34664035c89da8030345790479c4daa44494dccfa"} Feb 03 07:27:30 crc kubenswrapper[4708]: I0203 07:27:30.619330 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-t95nr" podStartSLOduration=3.184379139 podStartE2EDuration="21.619308716s" podCreationTimestamp="2026-02-03 07:27:09 +0000 UTC" firstStartedPulling="2026-02-03 07:27:10.820439091 +0000 UTC m=+1009.802385898" lastFinishedPulling="2026-02-03 07:27:29.255368668 +0000 UTC m=+1028.237315475" observedRunningTime="2026-02-03 07:27:30.617652075 +0000 UTC m=+1029.599598892" watchObservedRunningTime="2026-02-03 07:27:30.619308716 +0000 UTC m=+1029.601255523" Feb 03 07:27:30 crc kubenswrapper[4708]: I0203 07:27:30.657525 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-sgtsg" podStartSLOduration=3.076991293 podStartE2EDuration="14.657506841s" podCreationTimestamp="2026-02-03 07:27:16 +0000 UTC" firstStartedPulling="2026-02-03 07:27:17.722336902 +0000 UTC m=+1016.704283709" lastFinishedPulling="2026-02-03 07:27:29.30285245 +0000 UTC m=+1028.284799257" observedRunningTime="2026-02-03 07:27:30.656734832 +0000 UTC m=+1029.638681669" watchObservedRunningTime="2026-02-03 07:27:30.657506841 +0000 UTC m=+1029.639453648" Feb 03 07:27:31 crc kubenswrapper[4708]: I0203 07:27:31.608394 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a0593ff7-ba15-46be-8879-70dc42f3beb2","Type":"ContainerStarted","Data":"6f73d8953b021a71f8da1efd8b87be36b5468dce33697f8da969caf8ae7c01fb"} Feb 03 07:27:31 crc kubenswrapper[4708]: I0203 07:27:31.608743 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a0593ff7-ba15-46be-8879-70dc42f3beb2","Type":"ContainerStarted","Data":"3854a9e378c65d3bfc3e4a12b06825e6d3d32dc7881652f159b712590d3d668b"} Feb 03 07:27:31 crc kubenswrapper[4708]: I0203 07:27:31.949715 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-pb4xp-config-kxzv9" Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.025502 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-g8kn6" Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.061956 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q72v8\" (UniqueName: \"kubernetes.io/projected/efc4f37b-bfec-47a1-93be-28b47d107130-kube-api-access-q72v8\") pod \"efc4f37b-bfec-47a1-93be-28b47d107130\" (UID: \"efc4f37b-bfec-47a1-93be-28b47d107130\") " Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.062274 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/efc4f37b-bfec-47a1-93be-28b47d107130-scripts\") pod \"efc4f37b-bfec-47a1-93be-28b47d107130\" (UID: \"efc4f37b-bfec-47a1-93be-28b47d107130\") " Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.062349 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/efc4f37b-bfec-47a1-93be-28b47d107130-var-run-ovn\") pod \"efc4f37b-bfec-47a1-93be-28b47d107130\" (UID: \"efc4f37b-bfec-47a1-93be-28b47d107130\") " Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.062381 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/efc4f37b-bfec-47a1-93be-28b47d107130-additional-scripts\") pod \"efc4f37b-bfec-47a1-93be-28b47d107130\" (UID: \"efc4f37b-bfec-47a1-93be-28b47d107130\") " Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.062480 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/efc4f37b-bfec-47a1-93be-28b47d107130-var-log-ovn\") pod \"efc4f37b-bfec-47a1-93be-28b47d107130\" (UID: \"efc4f37b-bfec-47a1-93be-28b47d107130\") " Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.062506 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/efc4f37b-bfec-47a1-93be-28b47d107130-var-run\") pod \"efc4f37b-bfec-47a1-93be-28b47d107130\" (UID: \"efc4f37b-bfec-47a1-93be-28b47d107130\") " Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.062839 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/efc4f37b-bfec-47a1-93be-28b47d107130-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "efc4f37b-bfec-47a1-93be-28b47d107130" (UID: "efc4f37b-bfec-47a1-93be-28b47d107130"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.062973 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/efc4f37b-bfec-47a1-93be-28b47d107130-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "efc4f37b-bfec-47a1-93be-28b47d107130" (UID: "efc4f37b-bfec-47a1-93be-28b47d107130"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.063008 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/efc4f37b-bfec-47a1-93be-28b47d107130-var-run" (OuterVolumeSpecName: "var-run") pod "efc4f37b-bfec-47a1-93be-28b47d107130" (UID: "efc4f37b-bfec-47a1-93be-28b47d107130"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.063064 4708 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/efc4f37b-bfec-47a1-93be-28b47d107130-var-run-ovn\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.063703 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/efc4f37b-bfec-47a1-93be-28b47d107130-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "efc4f37b-bfec-47a1-93be-28b47d107130" (UID: "efc4f37b-bfec-47a1-93be-28b47d107130"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.064170 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/efc4f37b-bfec-47a1-93be-28b47d107130-scripts" (OuterVolumeSpecName: "scripts") pod "efc4f37b-bfec-47a1-93be-28b47d107130" (UID: "efc4f37b-bfec-47a1-93be-28b47d107130"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.069845 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efc4f37b-bfec-47a1-93be-28b47d107130-kube-api-access-q72v8" (OuterVolumeSpecName: "kube-api-access-q72v8") pod "efc4f37b-bfec-47a1-93be-28b47d107130" (UID: "efc4f37b-bfec-47a1-93be-28b47d107130"). InnerVolumeSpecName "kube-api-access-q72v8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.164280 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9t7tg\" (UniqueName: \"kubernetes.io/projected/df850a16-e0b1-4d87-913a-a30a7f2365be-kube-api-access-9t7tg\") pod \"df850a16-e0b1-4d87-913a-a30a7f2365be\" (UID: \"df850a16-e0b1-4d87-913a-a30a7f2365be\") " Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.164467 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/df850a16-e0b1-4d87-913a-a30a7f2365be-operator-scripts\") pod \"df850a16-e0b1-4d87-913a-a30a7f2365be\" (UID: \"df850a16-e0b1-4d87-913a-a30a7f2365be\") " Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.164818 4708 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/efc4f37b-bfec-47a1-93be-28b47d107130-additional-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.164835 4708 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/efc4f37b-bfec-47a1-93be-28b47d107130-var-log-ovn\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.164846 4708 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/efc4f37b-bfec-47a1-93be-28b47d107130-var-run\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.164855 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q72v8\" (UniqueName: \"kubernetes.io/projected/efc4f37b-bfec-47a1-93be-28b47d107130-kube-api-access-q72v8\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.164864 4708 reconciler_common.go:293] 
"Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/efc4f37b-bfec-47a1-93be-28b47d107130-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.165608 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df850a16-e0b1-4d87-913a-a30a7f2365be-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "df850a16-e0b1-4d87-913a-a30a7f2365be" (UID: "df850a16-e0b1-4d87-913a-a30a7f2365be"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.169072 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df850a16-e0b1-4d87-913a-a30a7f2365be-kube-api-access-9t7tg" (OuterVolumeSpecName: "kube-api-access-9t7tg") pod "df850a16-e0b1-4d87-913a-a30a7f2365be" (UID: "df850a16-e0b1-4d87-913a-a30a7f2365be"). InnerVolumeSpecName "kube-api-access-9t7tg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.266164 4708 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/df850a16-e0b1-4d87-913a-a30a7f2365be-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.266205 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9t7tg\" (UniqueName: \"kubernetes.io/projected/df850a16-e0b1-4d87-913a-a30a7f2365be-kube-api-access-9t7tg\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.624989 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a0593ff7-ba15-46be-8879-70dc42f3beb2","Type":"ContainerStarted","Data":"50628bd75243a6dccdcc667afd284153d12fbe3ffed35315fc19f73e8de4088a"} Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.625245 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a0593ff7-ba15-46be-8879-70dc42f3beb2","Type":"ContainerStarted","Data":"a1f46c2ebec0036d1580a49710d99d0c641d882884a8cdb87ea91f16ef5dcaa7"} Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.625257 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a0593ff7-ba15-46be-8879-70dc42f3beb2","Type":"ContainerStarted","Data":"0e5a82f1a2694a634d76ffd5e3f749c7436265365625dc32f97bcd3a01814f31"} Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.627566 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-g8kn6" event={"ID":"df850a16-e0b1-4d87-913a-a30a7f2365be","Type":"ContainerDied","Data":"7e52c78064adeb3043fbf0eca024d63f926347170f06a4d28cf461a5a8c1d893"} Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.627585 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7e52c78064adeb3043fbf0eca024d63f926347170f06a4d28cf461a5a8c1d893" Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.627631 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-g8kn6" Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.636735 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-pb4xp-config-kxzv9" event={"ID":"efc4f37b-bfec-47a1-93be-28b47d107130","Type":"ContainerDied","Data":"dc6100d638235a582ccf73275adfe5387a5dbe27e84b712e64aff731b9eb028e"} Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.636772 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dc6100d638235a582ccf73275adfe5387a5dbe27e84b712e64aff731b9eb028e" Feb 03 07:27:32 crc kubenswrapper[4708]: I0203 07:27:32.636914 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-pb4xp-config-kxzv9" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.048214 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-pb4xp-config-kxzv9"] Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.056344 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-pb4xp-config-kxzv9"] Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.164709 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-pb4xp-config-987p7"] Feb 03 07:27:33 crc kubenswrapper[4708]: E0203 07:27:33.165112 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7be3ad58-dba8-419b-85e7-52ca0ae0fe3a" containerName="mariadb-database-create" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.165131 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="7be3ad58-dba8-419b-85e7-52ca0ae0fe3a" containerName="mariadb-database-create" Feb 03 07:27:33 crc kubenswrapper[4708]: E0203 07:27:33.165147 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efc4f37b-bfec-47a1-93be-28b47d107130" containerName="ovn-config" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.165155 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="efc4f37b-bfec-47a1-93be-28b47d107130" containerName="ovn-config" Feb 03 07:27:33 crc kubenswrapper[4708]: E0203 07:27:33.165168 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26e5e627-4533-4800-bd50-826271c5dbef" containerName="mariadb-account-create-update" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.165175 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="26e5e627-4533-4800-bd50-826271c5dbef" containerName="mariadb-account-create-update" Feb 03 07:27:33 crc kubenswrapper[4708]: E0203 07:27:33.165187 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01fcd20e-f6ef-4ecc-b29a-98b053efae92" containerName="mariadb-account-create-update" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.165192 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="01fcd20e-f6ef-4ecc-b29a-98b053efae92" containerName="mariadb-account-create-update" Feb 03 07:27:33 crc kubenswrapper[4708]: E0203 07:27:33.165209 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df850a16-e0b1-4d87-913a-a30a7f2365be" containerName="mariadb-account-create-update" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.165214 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="df850a16-e0b1-4d87-913a-a30a7f2365be" containerName="mariadb-account-create-update" Feb 03 07:27:33 crc kubenswrapper[4708]: E0203 07:27:33.165222 4708 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="063192c5-ad89-4dde-bad6-af78e8bf8459" containerName="mariadb-database-create" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.165227 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="063192c5-ad89-4dde-bad6-af78e8bf8459" containerName="mariadb-database-create" Feb 03 07:27:33 crc kubenswrapper[4708]: E0203 07:27:33.165237 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e8f947f-87bc-4215-8c50-2409fb2b274f" containerName="mariadb-account-create-update" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.165243 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e8f947f-87bc-4215-8c50-2409fb2b274f" containerName="mariadb-account-create-update" Feb 03 07:27:33 crc kubenswrapper[4708]: E0203 07:27:33.165253 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f83726be-8013-490c-92c3-f19b0a04c112" containerName="mariadb-database-create" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.165259 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="f83726be-8013-490c-92c3-f19b0a04c112" containerName="mariadb-database-create" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.165432 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e8f947f-87bc-4215-8c50-2409fb2b274f" containerName="mariadb-account-create-update" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.165440 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="efc4f37b-bfec-47a1-93be-28b47d107130" containerName="ovn-config" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.165450 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="063192c5-ad89-4dde-bad6-af78e8bf8459" containerName="mariadb-database-create" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.165458 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="7be3ad58-dba8-419b-85e7-52ca0ae0fe3a" containerName="mariadb-database-create" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.165468 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="26e5e627-4533-4800-bd50-826271c5dbef" containerName="mariadb-account-create-update" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.165479 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="f83726be-8013-490c-92c3-f19b0a04c112" containerName="mariadb-database-create" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.165488 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="01fcd20e-f6ef-4ecc-b29a-98b053efae92" containerName="mariadb-account-create-update" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.165533 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="df850a16-e0b1-4d87-913a-a30a7f2365be" containerName="mariadb-account-create-update" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.166424 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-pb4xp-config-987p7" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.170340 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.173609 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-pb4xp-config-987p7"] Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.286304 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgwnx\" (UniqueName: \"kubernetes.io/projected/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-kube-api-access-fgwnx\") pod \"ovn-controller-pb4xp-config-987p7\" (UID: \"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa\") " pod="openstack/ovn-controller-pb4xp-config-987p7" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.286366 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-scripts\") pod \"ovn-controller-pb4xp-config-987p7\" (UID: \"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa\") " pod="openstack/ovn-controller-pb4xp-config-987p7" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.286396 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-var-run\") pod \"ovn-controller-pb4xp-config-987p7\" (UID: \"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa\") " pod="openstack/ovn-controller-pb4xp-config-987p7" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.286589 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-var-run-ovn\") pod \"ovn-controller-pb4xp-config-987p7\" (UID: \"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa\") " pod="openstack/ovn-controller-pb4xp-config-987p7" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.286612 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-var-log-ovn\") pod \"ovn-controller-pb4xp-config-987p7\" (UID: \"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa\") " pod="openstack/ovn-controller-pb4xp-config-987p7" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.286627 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-additional-scripts\") pod \"ovn-controller-pb4xp-config-987p7\" (UID: \"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa\") " pod="openstack/ovn-controller-pb4xp-config-987p7" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.387764 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-var-run-ovn\") pod \"ovn-controller-pb4xp-config-987p7\" (UID: \"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa\") " pod="openstack/ovn-controller-pb4xp-config-987p7" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.387833 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-var-log-ovn\") 
pod \"ovn-controller-pb4xp-config-987p7\" (UID: \"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa\") " pod="openstack/ovn-controller-pb4xp-config-987p7" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.387849 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-additional-scripts\") pod \"ovn-controller-pb4xp-config-987p7\" (UID: \"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa\") " pod="openstack/ovn-controller-pb4xp-config-987p7" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.387901 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgwnx\" (UniqueName: \"kubernetes.io/projected/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-kube-api-access-fgwnx\") pod \"ovn-controller-pb4xp-config-987p7\" (UID: \"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa\") " pod="openstack/ovn-controller-pb4xp-config-987p7" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.387926 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-scripts\") pod \"ovn-controller-pb4xp-config-987p7\" (UID: \"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa\") " pod="openstack/ovn-controller-pb4xp-config-987p7" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.387947 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-var-run\") pod \"ovn-controller-pb4xp-config-987p7\" (UID: \"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa\") " pod="openstack/ovn-controller-pb4xp-config-987p7" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.388183 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-var-run-ovn\") pod \"ovn-controller-pb4xp-config-987p7\" (UID: \"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa\") " pod="openstack/ovn-controller-pb4xp-config-987p7" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.388190 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-var-log-ovn\") pod \"ovn-controller-pb4xp-config-987p7\" (UID: \"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa\") " pod="openstack/ovn-controller-pb4xp-config-987p7" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.388187 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-var-run\") pod \"ovn-controller-pb4xp-config-987p7\" (UID: \"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa\") " pod="openstack/ovn-controller-pb4xp-config-987p7" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.388675 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-additional-scripts\") pod \"ovn-controller-pb4xp-config-987p7\" (UID: \"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa\") " pod="openstack/ovn-controller-pb4xp-config-987p7" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.389979 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-scripts\") pod \"ovn-controller-pb4xp-config-987p7\" 
(UID: \"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa\") " pod="openstack/ovn-controller-pb4xp-config-987p7" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.412674 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgwnx\" (UniqueName: \"kubernetes.io/projected/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-kube-api-access-fgwnx\") pod \"ovn-controller-pb4xp-config-987p7\" (UID: \"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa\") " pod="openstack/ovn-controller-pb4xp-config-987p7" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.495855 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-pb4xp-config-987p7" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.660758 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a0593ff7-ba15-46be-8879-70dc42f3beb2","Type":"ContainerStarted","Data":"1466b383fdb65991bac01824046dfa068e92ffd5ca45d67cd6bdcc99e68a49a1"} Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.661322 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"a0593ff7-ba15-46be-8879-70dc42f3beb2","Type":"ContainerStarted","Data":"f66f2074e8b580617be0d7e1ed95d8fd7de27abe8ae994839cff2f4a83c3b334"} Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.709953 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=36.849415002 podStartE2EDuration="52.70993389s" podCreationTimestamp="2026-02-03 07:26:41 +0000 UTC" firstStartedPulling="2026-02-03 07:27:15.247688625 +0000 UTC m=+1014.229635432" lastFinishedPulling="2026-02-03 07:27:31.108207473 +0000 UTC m=+1030.090154320" observedRunningTime="2026-02-03 07:27:33.706265461 +0000 UTC m=+1032.688212268" watchObservedRunningTime="2026-02-03 07:27:33.70993389 +0000 UTC m=+1032.691880697" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.968065 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-gjbwv"] Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.969775 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-gjbwv" Feb 03 07:27:33 crc kubenswrapper[4708]: W0203 07:27:33.969858 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8fa4198d_bbf3_4ce9_869d_bc182f9d33fa.slice/crio-e3edd8ab58197b62c6a43ff6ab79162a9fea2027e88e2b99c8c8e1e1fbd32375 WatchSource:0}: Error finding container e3edd8ab58197b62c6a43ff6ab79162a9fea2027e88e2b99c8c8e1e1fbd32375: Status 404 returned error can't find the container with id e3edd8ab58197b62c6a43ff6ab79162a9fea2027e88e2b99c8c8e1e1fbd32375 Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.972301 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Feb 03 07:27:33 crc kubenswrapper[4708]: I0203 07:27:33.997909 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-pb4xp-config-987p7"] Feb 03 07:27:34 crc kubenswrapper[4708]: I0203 07:27:34.010849 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-gjbwv"] Feb 03 07:27:34 crc kubenswrapper[4708]: I0203 07:27:34.100252 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-dns-svc\") pod \"dnsmasq-dns-764c5664d7-gjbwv\" (UID: \"1a9d62cd-7eb6-4c16-ab2e-3051620d2650\") " pod="openstack/dnsmasq-dns-764c5664d7-gjbwv" Feb 03 07:27:34 crc kubenswrapper[4708]: I0203 07:27:34.100323 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjwsf\" (UniqueName: \"kubernetes.io/projected/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-kube-api-access-kjwsf\") pod \"dnsmasq-dns-764c5664d7-gjbwv\" (UID: \"1a9d62cd-7eb6-4c16-ab2e-3051620d2650\") " pod="openstack/dnsmasq-dns-764c5664d7-gjbwv" Feb 03 07:27:34 crc kubenswrapper[4708]: I0203 07:27:34.100439 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-gjbwv\" (UID: \"1a9d62cd-7eb6-4c16-ab2e-3051620d2650\") " pod="openstack/dnsmasq-dns-764c5664d7-gjbwv" Feb 03 07:27:34 crc kubenswrapper[4708]: I0203 07:27:34.100497 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-gjbwv\" (UID: \"1a9d62cd-7eb6-4c16-ab2e-3051620d2650\") " pod="openstack/dnsmasq-dns-764c5664d7-gjbwv" Feb 03 07:27:34 crc kubenswrapper[4708]: I0203 07:27:34.100537 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-config\") pod \"dnsmasq-dns-764c5664d7-gjbwv\" (UID: \"1a9d62cd-7eb6-4c16-ab2e-3051620d2650\") " pod="openstack/dnsmasq-dns-764c5664d7-gjbwv" Feb 03 07:27:34 crc kubenswrapper[4708]: I0203 07:27:34.100583 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-gjbwv\" (UID: \"1a9d62cd-7eb6-4c16-ab2e-3051620d2650\") " pod="openstack/dnsmasq-dns-764c5664d7-gjbwv" 
Feb 03 07:27:34 crc kubenswrapper[4708]: I0203 07:27:34.106027 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efc4f37b-bfec-47a1-93be-28b47d107130" path="/var/lib/kubelet/pods/efc4f37b-bfec-47a1-93be-28b47d107130/volumes" Feb 03 07:27:34 crc kubenswrapper[4708]: I0203 07:27:34.202444 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-gjbwv\" (UID: \"1a9d62cd-7eb6-4c16-ab2e-3051620d2650\") " pod="openstack/dnsmasq-dns-764c5664d7-gjbwv" Feb 03 07:27:34 crc kubenswrapper[4708]: I0203 07:27:34.202516 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-dns-svc\") pod \"dnsmasq-dns-764c5664d7-gjbwv\" (UID: \"1a9d62cd-7eb6-4c16-ab2e-3051620d2650\") " pod="openstack/dnsmasq-dns-764c5664d7-gjbwv" Feb 03 07:27:34 crc kubenswrapper[4708]: I0203 07:27:34.202536 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjwsf\" (UniqueName: \"kubernetes.io/projected/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-kube-api-access-kjwsf\") pod \"dnsmasq-dns-764c5664d7-gjbwv\" (UID: \"1a9d62cd-7eb6-4c16-ab2e-3051620d2650\") " pod="openstack/dnsmasq-dns-764c5664d7-gjbwv" Feb 03 07:27:34 crc kubenswrapper[4708]: I0203 07:27:34.202596 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-gjbwv\" (UID: \"1a9d62cd-7eb6-4c16-ab2e-3051620d2650\") " pod="openstack/dnsmasq-dns-764c5664d7-gjbwv" Feb 03 07:27:34 crc kubenswrapper[4708]: I0203 07:27:34.202661 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-gjbwv\" (UID: \"1a9d62cd-7eb6-4c16-ab2e-3051620d2650\") " pod="openstack/dnsmasq-dns-764c5664d7-gjbwv" Feb 03 07:27:34 crc kubenswrapper[4708]: I0203 07:27:34.202691 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-config\") pod \"dnsmasq-dns-764c5664d7-gjbwv\" (UID: \"1a9d62cd-7eb6-4c16-ab2e-3051620d2650\") " pod="openstack/dnsmasq-dns-764c5664d7-gjbwv" Feb 03 07:27:34 crc kubenswrapper[4708]: I0203 07:27:34.203488 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-gjbwv\" (UID: \"1a9d62cd-7eb6-4c16-ab2e-3051620d2650\") " pod="openstack/dnsmasq-dns-764c5664d7-gjbwv" Feb 03 07:27:34 crc kubenswrapper[4708]: I0203 07:27:34.203542 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-config\") pod \"dnsmasq-dns-764c5664d7-gjbwv\" (UID: \"1a9d62cd-7eb6-4c16-ab2e-3051620d2650\") " pod="openstack/dnsmasq-dns-764c5664d7-gjbwv" Feb 03 07:27:34 crc kubenswrapper[4708]: I0203 07:27:34.203611 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-dns-svc\") pod \"dnsmasq-dns-764c5664d7-gjbwv\" (UID: \"1a9d62cd-7eb6-4c16-ab2e-3051620d2650\") " pod="openstack/dnsmasq-dns-764c5664d7-gjbwv" Feb 03 07:27:34 crc kubenswrapper[4708]: I0203 07:27:34.204090 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-gjbwv\" (UID: \"1a9d62cd-7eb6-4c16-ab2e-3051620d2650\") " pod="openstack/dnsmasq-dns-764c5664d7-gjbwv" Feb 03 07:27:34 crc kubenswrapper[4708]: I0203 07:27:34.204642 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-gjbwv\" (UID: \"1a9d62cd-7eb6-4c16-ab2e-3051620d2650\") " pod="openstack/dnsmasq-dns-764c5664d7-gjbwv" Feb 03 07:27:34 crc kubenswrapper[4708]: I0203 07:27:34.223753 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjwsf\" (UniqueName: \"kubernetes.io/projected/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-kube-api-access-kjwsf\") pod \"dnsmasq-dns-764c5664d7-gjbwv\" (UID: \"1a9d62cd-7eb6-4c16-ab2e-3051620d2650\") " pod="openstack/dnsmasq-dns-764c5664d7-gjbwv" Feb 03 07:27:34 crc kubenswrapper[4708]: I0203 07:27:34.323046 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-gjbwv" Feb 03 07:27:34 crc kubenswrapper[4708]: I0203 07:27:34.583706 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-pb4xp" Feb 03 07:27:34 crc kubenswrapper[4708]: I0203 07:27:34.673875 4708 generic.go:334] "Generic (PLEG): container finished" podID="8fa4198d-bbf3-4ce9-869d-bc182f9d33fa" containerID="7149d37923a18dfa510a1746af23e264541c58d6c9edbae1fa55797eefc62feb" exitCode=0 Feb 03 07:27:34 crc kubenswrapper[4708]: I0203 07:27:34.673950 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-pb4xp-config-987p7" event={"ID":"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa","Type":"ContainerDied","Data":"7149d37923a18dfa510a1746af23e264541c58d6c9edbae1fa55797eefc62feb"} Feb 03 07:27:34 crc kubenswrapper[4708]: I0203 07:27:34.673978 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-pb4xp-config-987p7" event={"ID":"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa","Type":"ContainerStarted","Data":"e3edd8ab58197b62c6a43ff6ab79162a9fea2027e88e2b99c8c8e1e1fbd32375"} Feb 03 07:27:34 crc kubenswrapper[4708]: I0203 07:27:34.676329 4708 generic.go:334] "Generic (PLEG): container finished" podID="adc80637-973b-4bd0-b444-4d2d41e23b8b" containerID="a4a7ee48486be6c0d8cb255566132c50e9692ed4d7ad09537dc3ba10be929495" exitCode=0 Feb 03 07:27:34 crc kubenswrapper[4708]: I0203 07:27:34.676424 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-sgtsg" event={"ID":"adc80637-973b-4bd0-b444-4d2d41e23b8b","Type":"ContainerDied","Data":"a4a7ee48486be6c0d8cb255566132c50e9692ed4d7ad09537dc3ba10be929495"} Feb 03 07:27:34 crc kubenswrapper[4708]: I0203 07:27:34.804183 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-gjbwv"] Feb 03 07:27:35 crc kubenswrapper[4708]: I0203 07:27:35.683874 4708 generic.go:334] "Generic (PLEG): container finished" podID="1a9d62cd-7eb6-4c16-ab2e-3051620d2650" 
containerID="a29f8a0ca7da0417c42d2598f4814b8bd972907ab59161087a7b6297ab3573a8" exitCode=0 Feb 03 07:27:35 crc kubenswrapper[4708]: I0203 07:27:35.683942 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-gjbwv" event={"ID":"1a9d62cd-7eb6-4c16-ab2e-3051620d2650","Type":"ContainerDied","Data":"a29f8a0ca7da0417c42d2598f4814b8bd972907ab59161087a7b6297ab3573a8"} Feb 03 07:27:35 crc kubenswrapper[4708]: I0203 07:27:35.684274 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-gjbwv" event={"ID":"1a9d62cd-7eb6-4c16-ab2e-3051620d2650","Type":"ContainerStarted","Data":"e4ec1201f8aa9beefe4f04f8d8ae9440f6e532bad66f325a3c2fdab79faad2bf"} Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.014253 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-pb4xp-config-987p7" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.022124 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-sgtsg" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.133294 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-var-run-ovn\") pod \"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa\" (UID: \"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa\") " Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.133364 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/adc80637-973b-4bd0-b444-4d2d41e23b8b-config-data\") pod \"adc80637-973b-4bd0-b444-4d2d41e23b8b\" (UID: \"adc80637-973b-4bd0-b444-4d2d41e23b8b\") " Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.133378 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "8fa4198d-bbf3-4ce9-869d-bc182f9d33fa" (UID: "8fa4198d-bbf3-4ce9-869d-bc182f9d33fa"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.133408 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/adc80637-973b-4bd0-b444-4d2d41e23b8b-combined-ca-bundle\") pod \"adc80637-973b-4bd0-b444-4d2d41e23b8b\" (UID: \"adc80637-973b-4bd0-b444-4d2d41e23b8b\") " Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.133487 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-scripts\") pod \"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa\" (UID: \"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa\") " Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.133542 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vqkpp\" (UniqueName: \"kubernetes.io/projected/adc80637-973b-4bd0-b444-4d2d41e23b8b-kube-api-access-vqkpp\") pod \"adc80637-973b-4bd0-b444-4d2d41e23b8b\" (UID: \"adc80637-973b-4bd0-b444-4d2d41e23b8b\") " Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.133599 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-var-log-ovn\") pod \"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa\" (UID: \"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa\") " Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.133678 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-var-run\") pod \"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa\" (UID: \"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa\") " Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.133727 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fgwnx\" (UniqueName: \"kubernetes.io/projected/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-kube-api-access-fgwnx\") pod \"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa\" (UID: \"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa\") " Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.133760 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-additional-scripts\") pod \"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa\" (UID: \"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa\") " Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.133826 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-var-run" (OuterVolumeSpecName: "var-run") pod "8fa4198d-bbf3-4ce9-869d-bc182f9d33fa" (UID: "8fa4198d-bbf3-4ce9-869d-bc182f9d33fa"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.133742 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "8fa4198d-bbf3-4ce9-869d-bc182f9d33fa" (UID: "8fa4198d-bbf3-4ce9-869d-bc182f9d33fa"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.134085 4708 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-var-run-ovn\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.134244 4708 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-var-run\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.134620 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "8fa4198d-bbf3-4ce9-869d-bc182f9d33fa" (UID: "8fa4198d-bbf3-4ce9-869d-bc182f9d33fa"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.134855 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-scripts" (OuterVolumeSpecName: "scripts") pod "8fa4198d-bbf3-4ce9-869d-bc182f9d33fa" (UID: "8fa4198d-bbf3-4ce9-869d-bc182f9d33fa"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.138276 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-kube-api-access-fgwnx" (OuterVolumeSpecName: "kube-api-access-fgwnx") pod "8fa4198d-bbf3-4ce9-869d-bc182f9d33fa" (UID: "8fa4198d-bbf3-4ce9-869d-bc182f9d33fa"). InnerVolumeSpecName "kube-api-access-fgwnx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.138380 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/adc80637-973b-4bd0-b444-4d2d41e23b8b-kube-api-access-vqkpp" (OuterVolumeSpecName: "kube-api-access-vqkpp") pod "adc80637-973b-4bd0-b444-4d2d41e23b8b" (UID: "adc80637-973b-4bd0-b444-4d2d41e23b8b"). InnerVolumeSpecName "kube-api-access-vqkpp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.160340 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/adc80637-973b-4bd0-b444-4d2d41e23b8b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "adc80637-973b-4bd0-b444-4d2d41e23b8b" (UID: "adc80637-973b-4bd0-b444-4d2d41e23b8b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.177225 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/adc80637-973b-4bd0-b444-4d2d41e23b8b-config-data" (OuterVolumeSpecName: "config-data") pod "adc80637-973b-4bd0-b444-4d2d41e23b8b" (UID: "adc80637-973b-4bd0-b444-4d2d41e23b8b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.236068 4708 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-var-log-ovn\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.236112 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fgwnx\" (UniqueName: \"kubernetes.io/projected/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-kube-api-access-fgwnx\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.236125 4708 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-additional-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.236139 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/adc80637-973b-4bd0-b444-4d2d41e23b8b-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.236150 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/adc80637-973b-4bd0-b444-4d2d41e23b8b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.236162 4708 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.236175 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vqkpp\" (UniqueName: \"kubernetes.io/projected/adc80637-973b-4bd0-b444-4d2d41e23b8b-kube-api-access-vqkpp\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.691884 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-gjbwv" event={"ID":"1a9d62cd-7eb6-4c16-ab2e-3051620d2650","Type":"ContainerStarted","Data":"08b500d7d8e54f3d56a9ef32b69f3f3f8a5f1fa410ff74f2965c27140bffe4df"} Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.692001 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-764c5664d7-gjbwv" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.697727 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-pb4xp-config-987p7" event={"ID":"8fa4198d-bbf3-4ce9-869d-bc182f9d33fa","Type":"ContainerDied","Data":"e3edd8ab58197b62c6a43ff6ab79162a9fea2027e88e2b99c8c8e1e1fbd32375"} Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.697751 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-pb4xp-config-987p7" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.697758 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e3edd8ab58197b62c6a43ff6ab79162a9fea2027e88e2b99c8c8e1e1fbd32375" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.699417 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-sgtsg" event={"ID":"adc80637-973b-4bd0-b444-4d2d41e23b8b","Type":"ContainerDied","Data":"0e1f121a8862b9efb196f8cc200b88b9afc88572b0302dd8bc79d94ead19c344"} Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.699442 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0e1f121a8862b9efb196f8cc200b88b9afc88572b0302dd8bc79d94ead19c344" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.699485 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-sgtsg" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.715649 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-764c5664d7-gjbwv" podStartSLOduration=3.715631027 podStartE2EDuration="3.715631027s" podCreationTimestamp="2026-02-03 07:27:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:27:36.710781818 +0000 UTC m=+1035.692728625" watchObservedRunningTime="2026-02-03 07:27:36.715631027 +0000 UTC m=+1035.697577834" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.969290 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-gjbwv"] Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.985332 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-7lhsc"] Feb 03 07:27:36 crc kubenswrapper[4708]: E0203 07:27:36.985756 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adc80637-973b-4bd0-b444-4d2d41e23b8b" containerName="keystone-db-sync" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.985775 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="adc80637-973b-4bd0-b444-4d2d41e23b8b" containerName="keystone-db-sync" Feb 03 07:27:36 crc kubenswrapper[4708]: E0203 07:27:36.985834 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fa4198d-bbf3-4ce9-869d-bc182f9d33fa" containerName="ovn-config" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.985843 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fa4198d-bbf3-4ce9-869d-bc182f9d33fa" containerName="ovn-config" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.986032 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fa4198d-bbf3-4ce9-869d-bc182f9d33fa" containerName="ovn-config" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.986063 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="adc80637-973b-4bd0-b444-4d2d41e23b8b" containerName="keystone-db-sync" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.986750 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-7lhsc" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.988552 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.989446 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.989685 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.991104 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-x5pvc" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.991346 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 03 07:27:36 crc kubenswrapper[4708]: I0203 07:27:36.993769 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-7lhsc"] Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.026877 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-rvq47"] Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.028153 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-rvq47" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.063446 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-rvq47"] Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.152323 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/714caa63-0e8e-4d53-addd-11092a5b68c3-dns-svc\") pod \"dnsmasq-dns-5959f8865f-rvq47\" (UID: \"714caa63-0e8e-4d53-addd-11092a5b68c3\") " pod="openstack/dnsmasq-dns-5959f8865f-rvq47" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.152395 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7dk6\" (UniqueName: \"kubernetes.io/projected/714caa63-0e8e-4d53-addd-11092a5b68c3-kube-api-access-h7dk6\") pod \"dnsmasq-dns-5959f8865f-rvq47\" (UID: \"714caa63-0e8e-4d53-addd-11092a5b68c3\") " pod="openstack/dnsmasq-dns-5959f8865f-rvq47" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.152429 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/714caa63-0e8e-4d53-addd-11092a5b68c3-ovsdbserver-sb\") pod \"dnsmasq-dns-5959f8865f-rvq47\" (UID: \"714caa63-0e8e-4d53-addd-11092a5b68c3\") " pod="openstack/dnsmasq-dns-5959f8865f-rvq47" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.152463 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-848wd\" (UniqueName: \"kubernetes.io/projected/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-kube-api-access-848wd\") pod \"keystone-bootstrap-7lhsc\" (UID: \"2af44247-24f0-4b7b-aac1-f431a0ba0eb2\") " pod="openstack/keystone-bootstrap-7lhsc" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.152557 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/714caa63-0e8e-4d53-addd-11092a5b68c3-ovsdbserver-nb\") pod \"dnsmasq-dns-5959f8865f-rvq47\" (UID: \"714caa63-0e8e-4d53-addd-11092a5b68c3\") " 
pod="openstack/dnsmasq-dns-5959f8865f-rvq47" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.152604 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-scripts\") pod \"keystone-bootstrap-7lhsc\" (UID: \"2af44247-24f0-4b7b-aac1-f431a0ba0eb2\") " pod="openstack/keystone-bootstrap-7lhsc" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.152625 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-fernet-keys\") pod \"keystone-bootstrap-7lhsc\" (UID: \"2af44247-24f0-4b7b-aac1-f431a0ba0eb2\") " pod="openstack/keystone-bootstrap-7lhsc" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.152653 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-combined-ca-bundle\") pod \"keystone-bootstrap-7lhsc\" (UID: \"2af44247-24f0-4b7b-aac1-f431a0ba0eb2\") " pod="openstack/keystone-bootstrap-7lhsc" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.152677 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-config-data\") pod \"keystone-bootstrap-7lhsc\" (UID: \"2af44247-24f0-4b7b-aac1-f431a0ba0eb2\") " pod="openstack/keystone-bootstrap-7lhsc" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.152694 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/714caa63-0e8e-4d53-addd-11092a5b68c3-dns-swift-storage-0\") pod \"dnsmasq-dns-5959f8865f-rvq47\" (UID: \"714caa63-0e8e-4d53-addd-11092a5b68c3\") " pod="openstack/dnsmasq-dns-5959f8865f-rvq47" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.152715 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/714caa63-0e8e-4d53-addd-11092a5b68c3-config\") pod \"dnsmasq-dns-5959f8865f-rvq47\" (UID: \"714caa63-0e8e-4d53-addd-11092a5b68c3\") " pod="openstack/dnsmasq-dns-5959f8865f-rvq47" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.152734 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-credential-keys\") pod \"keystone-bootstrap-7lhsc\" (UID: \"2af44247-24f0-4b7b-aac1-f431a0ba0eb2\") " pod="openstack/keystone-bootstrap-7lhsc" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.170868 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-pb4xp-config-987p7"] Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.187657 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-pb4xp-config-987p7"] Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.209877 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ironic-bbf1-account-create-update-5htsg"] Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.210989 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-bbf1-account-create-update-5htsg" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.213932 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-db-secret" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.216383 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ironic-db-create-ktbjr"] Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.220978 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-db-create-ktbjr" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.238561 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-cxbc2"] Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.239483 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-cxbc2" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.245280 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.245764 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.246062 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-nb4t6" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.250836 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-db-create-ktbjr"] Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.255542 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/714caa63-0e8e-4d53-addd-11092a5b68c3-ovsdbserver-nb\") pod \"dnsmasq-dns-5959f8865f-rvq47\" (UID: \"714caa63-0e8e-4d53-addd-11092a5b68c3\") " pod="openstack/dnsmasq-dns-5959f8865f-rvq47" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.255587 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-scripts\") pod \"keystone-bootstrap-7lhsc\" (UID: \"2af44247-24f0-4b7b-aac1-f431a0ba0eb2\") " pod="openstack/keystone-bootstrap-7lhsc" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.255603 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-fernet-keys\") pod \"keystone-bootstrap-7lhsc\" (UID: \"2af44247-24f0-4b7b-aac1-f431a0ba0eb2\") " pod="openstack/keystone-bootstrap-7lhsc" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.255641 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-combined-ca-bundle\") pod \"keystone-bootstrap-7lhsc\" (UID: \"2af44247-24f0-4b7b-aac1-f431a0ba0eb2\") " pod="openstack/keystone-bootstrap-7lhsc" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.255664 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-config-data\") pod \"keystone-bootstrap-7lhsc\" (UID: \"2af44247-24f0-4b7b-aac1-f431a0ba0eb2\") " pod="openstack/keystone-bootstrap-7lhsc" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.255681 4708 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/714caa63-0e8e-4d53-addd-11092a5b68c3-dns-swift-storage-0\") pod \"dnsmasq-dns-5959f8865f-rvq47\" (UID: \"714caa63-0e8e-4d53-addd-11092a5b68c3\") " pod="openstack/dnsmasq-dns-5959f8865f-rvq47" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.255699 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/714caa63-0e8e-4d53-addd-11092a5b68c3-config\") pod \"dnsmasq-dns-5959f8865f-rvq47\" (UID: \"714caa63-0e8e-4d53-addd-11092a5b68c3\") " pod="openstack/dnsmasq-dns-5959f8865f-rvq47" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.255717 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-credential-keys\") pod \"keystone-bootstrap-7lhsc\" (UID: \"2af44247-24f0-4b7b-aac1-f431a0ba0eb2\") " pod="openstack/keystone-bootstrap-7lhsc" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.255736 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/714caa63-0e8e-4d53-addd-11092a5b68c3-dns-svc\") pod \"dnsmasq-dns-5959f8865f-rvq47\" (UID: \"714caa63-0e8e-4d53-addd-11092a5b68c3\") " pod="openstack/dnsmasq-dns-5959f8865f-rvq47" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.255756 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7dk6\" (UniqueName: \"kubernetes.io/projected/714caa63-0e8e-4d53-addd-11092a5b68c3-kube-api-access-h7dk6\") pod \"dnsmasq-dns-5959f8865f-rvq47\" (UID: \"714caa63-0e8e-4d53-addd-11092a5b68c3\") " pod="openstack/dnsmasq-dns-5959f8865f-rvq47" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.255775 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/714caa63-0e8e-4d53-addd-11092a5b68c3-ovsdbserver-sb\") pod \"dnsmasq-dns-5959f8865f-rvq47\" (UID: \"714caa63-0e8e-4d53-addd-11092a5b68c3\") " pod="openstack/dnsmasq-dns-5959f8865f-rvq47" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.255814 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-848wd\" (UniqueName: \"kubernetes.io/projected/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-kube-api-access-848wd\") pod \"keystone-bootstrap-7lhsc\" (UID: \"2af44247-24f0-4b7b-aac1-f431a0ba0eb2\") " pod="openstack/keystone-bootstrap-7lhsc" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.257536 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/714caa63-0e8e-4d53-addd-11092a5b68c3-ovsdbserver-nb\") pod \"dnsmasq-dns-5959f8865f-rvq47\" (UID: \"714caa63-0e8e-4d53-addd-11092a5b68c3\") " pod="openstack/dnsmasq-dns-5959f8865f-rvq47" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.258199 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/714caa63-0e8e-4d53-addd-11092a5b68c3-config\") pod \"dnsmasq-dns-5959f8865f-rvq47\" (UID: \"714caa63-0e8e-4d53-addd-11092a5b68c3\") " pod="openstack/dnsmasq-dns-5959f8865f-rvq47" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.258639 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/714caa63-0e8e-4d53-addd-11092a5b68c3-ovsdbserver-sb\") pod \"dnsmasq-dns-5959f8865f-rvq47\" (UID: \"714caa63-0e8e-4d53-addd-11092a5b68c3\") " pod="openstack/dnsmasq-dns-5959f8865f-rvq47" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.258773 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/714caa63-0e8e-4d53-addd-11092a5b68c3-dns-svc\") pod \"dnsmasq-dns-5959f8865f-rvq47\" (UID: \"714caa63-0e8e-4d53-addd-11092a5b68c3\") " pod="openstack/dnsmasq-dns-5959f8865f-rvq47" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.259311 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/714caa63-0e8e-4d53-addd-11092a5b68c3-dns-swift-storage-0\") pod \"dnsmasq-dns-5959f8865f-rvq47\" (UID: \"714caa63-0e8e-4d53-addd-11092a5b68c3\") " pod="openstack/dnsmasq-dns-5959f8865f-rvq47" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.262219 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-credential-keys\") pod \"keystone-bootstrap-7lhsc\" (UID: \"2af44247-24f0-4b7b-aac1-f431a0ba0eb2\") " pod="openstack/keystone-bootstrap-7lhsc" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.262279 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-fernet-keys\") pod \"keystone-bootstrap-7lhsc\" (UID: \"2af44247-24f0-4b7b-aac1-f431a0ba0eb2\") " pod="openstack/keystone-bootstrap-7lhsc" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.283124 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-combined-ca-bundle\") pod \"keystone-bootstrap-7lhsc\" (UID: \"2af44247-24f0-4b7b-aac1-f431a0ba0eb2\") " pod="openstack/keystone-bootstrap-7lhsc" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.287331 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-bbf1-account-create-update-5htsg"] Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.292532 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-scripts\") pod \"keystone-bootstrap-7lhsc\" (UID: \"2af44247-24f0-4b7b-aac1-f431a0ba0eb2\") " pod="openstack/keystone-bootstrap-7lhsc" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.293036 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-848wd\" (UniqueName: \"kubernetes.io/projected/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-kube-api-access-848wd\") pod \"keystone-bootstrap-7lhsc\" (UID: \"2af44247-24f0-4b7b-aac1-f431a0ba0eb2\") " pod="openstack/keystone-bootstrap-7lhsc" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.293399 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-config-data\") pod \"keystone-bootstrap-7lhsc\" (UID: \"2af44247-24f0-4b7b-aac1-f431a0ba0eb2\") " pod="openstack/keystone-bootstrap-7lhsc" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.302819 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7dk6\" (UniqueName: 
\"kubernetes.io/projected/714caa63-0e8e-4d53-addd-11092a5b68c3-kube-api-access-h7dk6\") pod \"dnsmasq-dns-5959f8865f-rvq47\" (UID: \"714caa63-0e8e-4d53-addd-11092a5b68c3\") " pod="openstack/dnsmasq-dns-5959f8865f-rvq47" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.306409 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7lhsc" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.342436 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-cxbc2"] Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.349173 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-rvq47" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.358132 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7rn9f\" (UniqueName: \"kubernetes.io/projected/efea8ba9-1f4b-4a03-aafd-8388de2f8bc0-kube-api-access-7rn9f\") pod \"ironic-bbf1-account-create-update-5htsg\" (UID: \"efea8ba9-1f4b-4a03-aafd-8388de2f8bc0\") " pod="openstack/ironic-bbf1-account-create-update-5htsg" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.358207 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dcaf1aa4-0bde-49a7-a027-140450f08736-config\") pod \"neutron-db-sync-cxbc2\" (UID: \"dcaf1aa4-0bde-49a7-a027-140450f08736\") " pod="openstack/neutron-db-sync-cxbc2" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.358230 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcaf1aa4-0bde-49a7-a027-140450f08736-combined-ca-bundle\") pod \"neutron-db-sync-cxbc2\" (UID: \"dcaf1aa4-0bde-49a7-a027-140450f08736\") " pod="openstack/neutron-db-sync-cxbc2" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.358268 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2pwrw\" (UniqueName: \"kubernetes.io/projected/4c0d871a-3a87-4651-8392-e69acf628940-kube-api-access-2pwrw\") pod \"ironic-db-create-ktbjr\" (UID: \"4c0d871a-3a87-4651-8392-e69acf628940\") " pod="openstack/ironic-db-create-ktbjr" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.358411 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c582k\" (UniqueName: \"kubernetes.io/projected/dcaf1aa4-0bde-49a7-a027-140450f08736-kube-api-access-c582k\") pod \"neutron-db-sync-cxbc2\" (UID: \"dcaf1aa4-0bde-49a7-a027-140450f08736\") " pod="openstack/neutron-db-sync-cxbc2" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.358517 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c0d871a-3a87-4651-8392-e69acf628940-operator-scripts\") pod \"ironic-db-create-ktbjr\" (UID: \"4c0d871a-3a87-4651-8392-e69acf628940\") " pod="openstack/ironic-db-create-ktbjr" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.358584 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/efea8ba9-1f4b-4a03-aafd-8388de2f8bc0-operator-scripts\") pod \"ironic-bbf1-account-create-update-5htsg\" (UID: \"efea8ba9-1f4b-4a03-aafd-8388de2f8bc0\") " 
pod="openstack/ironic-bbf1-account-create-update-5htsg" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.444312 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-2smzn"] Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.446572 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-2smzn" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.451423 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-6xzsv" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.451587 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.467318 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7rn9f\" (UniqueName: \"kubernetes.io/projected/efea8ba9-1f4b-4a03-aafd-8388de2f8bc0-kube-api-access-7rn9f\") pod \"ironic-bbf1-account-create-update-5htsg\" (UID: \"efea8ba9-1f4b-4a03-aafd-8388de2f8bc0\") " pod="openstack/ironic-bbf1-account-create-update-5htsg" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.467380 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dcaf1aa4-0bde-49a7-a027-140450f08736-config\") pod \"neutron-db-sync-cxbc2\" (UID: \"dcaf1aa4-0bde-49a7-a027-140450f08736\") " pod="openstack/neutron-db-sync-cxbc2" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.467404 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcaf1aa4-0bde-49a7-a027-140450f08736-combined-ca-bundle\") pod \"neutron-db-sync-cxbc2\" (UID: \"dcaf1aa4-0bde-49a7-a027-140450f08736\") " pod="openstack/neutron-db-sync-cxbc2" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.467448 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2pwrw\" (UniqueName: \"kubernetes.io/projected/4c0d871a-3a87-4651-8392-e69acf628940-kube-api-access-2pwrw\") pod \"ironic-db-create-ktbjr\" (UID: \"4c0d871a-3a87-4651-8392-e69acf628940\") " pod="openstack/ironic-db-create-ktbjr" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.467488 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c582k\" (UniqueName: \"kubernetes.io/projected/dcaf1aa4-0bde-49a7-a027-140450f08736-kube-api-access-c582k\") pod \"neutron-db-sync-cxbc2\" (UID: \"dcaf1aa4-0bde-49a7-a027-140450f08736\") " pod="openstack/neutron-db-sync-cxbc2" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.467532 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c0d871a-3a87-4651-8392-e69acf628940-operator-scripts\") pod \"ironic-db-create-ktbjr\" (UID: \"4c0d871a-3a87-4651-8392-e69acf628940\") " pod="openstack/ironic-db-create-ktbjr" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.467588 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/efea8ba9-1f4b-4a03-aafd-8388de2f8bc0-operator-scripts\") pod \"ironic-bbf1-account-create-update-5htsg\" (UID: \"efea8ba9-1f4b-4a03-aafd-8388de2f8bc0\") " pod="openstack/ironic-bbf1-account-create-update-5htsg" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.470475 4708 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/efea8ba9-1f4b-4a03-aafd-8388de2f8bc0-operator-scripts\") pod \"ironic-bbf1-account-create-update-5htsg\" (UID: \"efea8ba9-1f4b-4a03-aafd-8388de2f8bc0\") " pod="openstack/ironic-bbf1-account-create-update-5htsg" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.471189 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c0d871a-3a87-4651-8392-e69acf628940-operator-scripts\") pod \"ironic-db-create-ktbjr\" (UID: \"4c0d871a-3a87-4651-8392-e69acf628940\") " pod="openstack/ironic-db-create-ktbjr" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.474202 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcaf1aa4-0bde-49a7-a027-140450f08736-combined-ca-bundle\") pod \"neutron-db-sync-cxbc2\" (UID: \"dcaf1aa4-0bde-49a7-a027-140450f08736\") " pod="openstack/neutron-db-sync-cxbc2" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.491356 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/dcaf1aa4-0bde-49a7-a027-140450f08736-config\") pod \"neutron-db-sync-cxbc2\" (UID: \"dcaf1aa4-0bde-49a7-a027-140450f08736\") " pod="openstack/neutron-db-sync-cxbc2" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.493241 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c582k\" (UniqueName: \"kubernetes.io/projected/dcaf1aa4-0bde-49a7-a027-140450f08736-kube-api-access-c582k\") pod \"neutron-db-sync-cxbc2\" (UID: \"dcaf1aa4-0bde-49a7-a027-140450f08736\") " pod="openstack/neutron-db-sync-cxbc2" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.493643 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7rn9f\" (UniqueName: \"kubernetes.io/projected/efea8ba9-1f4b-4a03-aafd-8388de2f8bc0-kube-api-access-7rn9f\") pod \"ironic-bbf1-account-create-update-5htsg\" (UID: \"efea8ba9-1f4b-4a03-aafd-8388de2f8bc0\") " pod="openstack/ironic-bbf1-account-create-update-5htsg" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.508184 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-cxbc2" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.509224 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2pwrw\" (UniqueName: \"kubernetes.io/projected/4c0d871a-3a87-4651-8392-e69acf628940-kube-api-access-2pwrw\") pod \"ironic-db-create-ktbjr\" (UID: \"4c0d871a-3a87-4651-8392-e69acf628940\") " pod="openstack/ironic-db-create-ktbjr" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.513101 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-2smzn"] Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.531590 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-w75bv"] Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.532732 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-w75bv" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.541107 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-w75bv"] Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.546116 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-jlkn6" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.546509 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.546734 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.558693 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.561490 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.571363 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-rvq47"] Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.573457 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.574311 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxlq6\" (UniqueName: \"kubernetes.io/projected/fde8edd5-50e0-4bb0-8701-54e0998444a1-kube-api-access-xxlq6\") pod \"cinder-db-sync-w75bv\" (UID: \"fde8edd5-50e0-4bb0-8701-54e0998444a1\") " pod="openstack/cinder-db-sync-w75bv" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.574371 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a3122ca-fa36-4033-a44f-36d12d0b3f4a-combined-ca-bundle\") pod \"barbican-db-sync-2smzn\" (UID: \"1a3122ca-fa36-4033-a44f-36d12d0b3f4a\") " pod="openstack/barbican-db-sync-2smzn" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.574399 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cg54j\" (UniqueName: \"kubernetes.io/projected/1a3122ca-fa36-4033-a44f-36d12d0b3f4a-kube-api-access-cg54j\") pod \"barbican-db-sync-2smzn\" (UID: \"1a3122ca-fa36-4033-a44f-36d12d0b3f4a\") " pod="openstack/barbican-db-sync-2smzn" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.574427 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/fde8edd5-50e0-4bb0-8701-54e0998444a1-db-sync-config-data\") pod \"cinder-db-sync-w75bv\" (UID: \"fde8edd5-50e0-4bb0-8701-54e0998444a1\") " pod="openstack/cinder-db-sync-w75bv" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.574449 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fde8edd5-50e0-4bb0-8701-54e0998444a1-config-data\") pod \"cinder-db-sync-w75bv\" (UID: \"fde8edd5-50e0-4bb0-8701-54e0998444a1\") " pod="openstack/cinder-db-sync-w75bv" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.574475 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/fde8edd5-50e0-4bb0-8701-54e0998444a1-scripts\") pod \"cinder-db-sync-w75bv\" (UID: \"fde8edd5-50e0-4bb0-8701-54e0998444a1\") " pod="openstack/cinder-db-sync-w75bv" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.574492 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1a3122ca-fa36-4033-a44f-36d12d0b3f4a-db-sync-config-data\") pod \"barbican-db-sync-2smzn\" (UID: \"1a3122ca-fa36-4033-a44f-36d12d0b3f4a\") " pod="openstack/barbican-db-sync-2smzn" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.574557 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fde8edd5-50e0-4bb0-8701-54e0998444a1-etc-machine-id\") pod \"cinder-db-sync-w75bv\" (UID: \"fde8edd5-50e0-4bb0-8701-54e0998444a1\") " pod="openstack/cinder-db-sync-w75bv" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.574591 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fde8edd5-50e0-4bb0-8701-54e0998444a1-combined-ca-bundle\") pod \"cinder-db-sync-w75bv\" (UID: \"fde8edd5-50e0-4bb0-8701-54e0998444a1\") " pod="openstack/cinder-db-sync-w75bv" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.575598 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.580974 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.590473 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-cl2c7"] Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.591313 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-bbf1-account-create-update-5htsg" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.591949 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-cl2c7" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.594335 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.594600 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.594845 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-lk7kq" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.596950 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-n7s6l"] Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.598205 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.610017 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-cl2c7"] Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.618286 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-n7s6l"] Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.675813 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46305c26-487f-4187-a8ae-48fd2319d25a-log-httpd\") pod \"ceilometer-0\" (UID: \"46305c26-487f-4187-a8ae-48fd2319d25a\") " pod="openstack/ceilometer-0" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.675855 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fde8edd5-50e0-4bb0-8701-54e0998444a1-etc-machine-id\") pod \"cinder-db-sync-w75bv\" (UID: \"fde8edd5-50e0-4bb0-8701-54e0998444a1\") " pod="openstack/cinder-db-sync-w75bv" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.675873 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-dns-swift-storage-0\") pod \"dnsmasq-dns-58dd9ff6bc-n7s6l\" (UID: \"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.675895 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29-config-data\") pod \"placement-db-sync-cl2c7\" (UID: \"ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29\") " pod="openstack/placement-db-sync-cl2c7" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.675913 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fde8edd5-50e0-4bb0-8701-54e0998444a1-combined-ca-bundle\") pod \"cinder-db-sync-w75bv\" (UID: \"fde8edd5-50e0-4bb0-8701-54e0998444a1\") " pod="openstack/cinder-db-sync-w75bv" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.675937 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46305c26-487f-4187-a8ae-48fd2319d25a-run-httpd\") pod \"ceilometer-0\" (UID: \"46305c26-487f-4187-a8ae-48fd2319d25a\") " pod="openstack/ceilometer-0" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.675957 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46305c26-487f-4187-a8ae-48fd2319d25a-config-data\") pod \"ceilometer-0\" (UID: \"46305c26-487f-4187-a8ae-48fd2319d25a\") " pod="openstack/ceilometer-0" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.675976 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxlq6\" (UniqueName: \"kubernetes.io/projected/fde8edd5-50e0-4bb0-8701-54e0998444a1-kube-api-access-xxlq6\") pod \"cinder-db-sync-w75bv\" (UID: \"fde8edd5-50e0-4bb0-8701-54e0998444a1\") " pod="openstack/cinder-db-sync-w75bv" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.675996 4708 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29-combined-ca-bundle\") pod \"placement-db-sync-cl2c7\" (UID: \"ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29\") " pod="openstack/placement-db-sync-cl2c7" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.676022 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46305c26-487f-4187-a8ae-48fd2319d25a-scripts\") pod \"ceilometer-0\" (UID: \"46305c26-487f-4187-a8ae-48fd2319d25a\") " pod="openstack/ceilometer-0" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.676041 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a3122ca-fa36-4033-a44f-36d12d0b3f4a-combined-ca-bundle\") pod \"barbican-db-sync-2smzn\" (UID: \"1a3122ca-fa36-4033-a44f-36d12d0b3f4a\") " pod="openstack/barbican-db-sync-2smzn" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.676058 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ws9bf\" (UniqueName: \"kubernetes.io/projected/46305c26-487f-4187-a8ae-48fd2319d25a-kube-api-access-ws9bf\") pod \"ceilometer-0\" (UID: \"46305c26-487f-4187-a8ae-48fd2319d25a\") " pod="openstack/ceilometer-0" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.676080 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cg54j\" (UniqueName: \"kubernetes.io/projected/1a3122ca-fa36-4033-a44f-36d12d0b3f4a-kube-api-access-cg54j\") pod \"barbican-db-sync-2smzn\" (UID: \"1a3122ca-fa36-4033-a44f-36d12d0b3f4a\") " pod="openstack/barbican-db-sync-2smzn" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.676103 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-dns-svc\") pod \"dnsmasq-dns-58dd9ff6bc-n7s6l\" (UID: \"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.676121 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/fde8edd5-50e0-4bb0-8701-54e0998444a1-db-sync-config-data\") pod \"cinder-db-sync-w75bv\" (UID: \"fde8edd5-50e0-4bb0-8701-54e0998444a1\") " pod="openstack/cinder-db-sync-w75bv" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.676139 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-config\") pod \"dnsmasq-dns-58dd9ff6bc-n7s6l\" (UID: \"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.676155 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fde8edd5-50e0-4bb0-8701-54e0998444a1-config-data\") pod \"cinder-db-sync-w75bv\" (UID: \"fde8edd5-50e0-4bb0-8701-54e0998444a1\") " pod="openstack/cinder-db-sync-w75bv" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.676172 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" 
(UniqueName: \"kubernetes.io/configmap/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-ovsdbserver-sb\") pod \"dnsmasq-dns-58dd9ff6bc-n7s6l\" (UID: \"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.676196 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69v4f\" (UniqueName: \"kubernetes.io/projected/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-kube-api-access-69v4f\") pod \"dnsmasq-dns-58dd9ff6bc-n7s6l\" (UID: \"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.676214 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fde8edd5-50e0-4bb0-8701-54e0998444a1-scripts\") pod \"cinder-db-sync-w75bv\" (UID: \"fde8edd5-50e0-4bb0-8701-54e0998444a1\") " pod="openstack/cinder-db-sync-w75bv" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.676231 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1a3122ca-fa36-4033-a44f-36d12d0b3f4a-db-sync-config-data\") pod \"barbican-db-sync-2smzn\" (UID: \"1a3122ca-fa36-4033-a44f-36d12d0b3f4a\") " pod="openstack/barbican-db-sync-2smzn" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.676247 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29-logs\") pod \"placement-db-sync-cl2c7\" (UID: \"ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29\") " pod="openstack/placement-db-sync-cl2c7" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.676266 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-ovsdbserver-nb\") pod \"dnsmasq-dns-58dd9ff6bc-n7s6l\" (UID: \"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.676286 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46305c26-487f-4187-a8ae-48fd2319d25a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"46305c26-487f-4187-a8ae-48fd2319d25a\") " pod="openstack/ceilometer-0" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.676303 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29-scripts\") pod \"placement-db-sync-cl2c7\" (UID: \"ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29\") " pod="openstack/placement-db-sync-cl2c7" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.676324 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/46305c26-487f-4187-a8ae-48fd2319d25a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"46305c26-487f-4187-a8ae-48fd2319d25a\") " pod="openstack/ceilometer-0" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.676344 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtqf5\" (UniqueName: 
\"kubernetes.io/projected/ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29-kube-api-access-xtqf5\") pod \"placement-db-sync-cl2c7\" (UID: \"ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29\") " pod="openstack/placement-db-sync-cl2c7" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.676443 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fde8edd5-50e0-4bb0-8701-54e0998444a1-etc-machine-id\") pod \"cinder-db-sync-w75bv\" (UID: \"fde8edd5-50e0-4bb0-8701-54e0998444a1\") " pod="openstack/cinder-db-sync-w75bv" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.682393 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/fde8edd5-50e0-4bb0-8701-54e0998444a1-db-sync-config-data\") pod \"cinder-db-sync-w75bv\" (UID: \"fde8edd5-50e0-4bb0-8701-54e0998444a1\") " pod="openstack/cinder-db-sync-w75bv" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.684136 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1a3122ca-fa36-4033-a44f-36d12d0b3f4a-db-sync-config-data\") pod \"barbican-db-sync-2smzn\" (UID: \"1a3122ca-fa36-4033-a44f-36d12d0b3f4a\") " pod="openstack/barbican-db-sync-2smzn" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.684972 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a3122ca-fa36-4033-a44f-36d12d0b3f4a-combined-ca-bundle\") pod \"barbican-db-sync-2smzn\" (UID: \"1a3122ca-fa36-4033-a44f-36d12d0b3f4a\") " pod="openstack/barbican-db-sync-2smzn" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.685057 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fde8edd5-50e0-4bb0-8701-54e0998444a1-config-data\") pod \"cinder-db-sync-w75bv\" (UID: \"fde8edd5-50e0-4bb0-8701-54e0998444a1\") " pod="openstack/cinder-db-sync-w75bv" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.688701 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fde8edd5-50e0-4bb0-8701-54e0998444a1-combined-ca-bundle\") pod \"cinder-db-sync-w75bv\" (UID: \"fde8edd5-50e0-4bb0-8701-54e0998444a1\") " pod="openstack/cinder-db-sync-w75bv" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.704401 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxlq6\" (UniqueName: \"kubernetes.io/projected/fde8edd5-50e0-4bb0-8701-54e0998444a1-kube-api-access-xxlq6\") pod \"cinder-db-sync-w75bv\" (UID: \"fde8edd5-50e0-4bb0-8701-54e0998444a1\") " pod="openstack/cinder-db-sync-w75bv" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.707518 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cg54j\" (UniqueName: \"kubernetes.io/projected/1a3122ca-fa36-4033-a44f-36d12d0b3f4a-kube-api-access-cg54j\") pod \"barbican-db-sync-2smzn\" (UID: \"1a3122ca-fa36-4033-a44f-36d12d0b3f4a\") " pod="openstack/barbican-db-sync-2smzn" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.715279 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fde8edd5-50e0-4bb0-8701-54e0998444a1-scripts\") pod \"cinder-db-sync-w75bv\" (UID: \"fde8edd5-50e0-4bb0-8701-54e0998444a1\") " pod="openstack/cinder-db-sync-w75bv" Feb 03 07:27:37 crc 
kubenswrapper[4708]: I0203 07:27:37.778836 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46305c26-487f-4187-a8ae-48fd2319d25a-scripts\") pod \"ceilometer-0\" (UID: \"46305c26-487f-4187-a8ae-48fd2319d25a\") " pod="openstack/ceilometer-0" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.778888 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ws9bf\" (UniqueName: \"kubernetes.io/projected/46305c26-487f-4187-a8ae-48fd2319d25a-kube-api-access-ws9bf\") pod \"ceilometer-0\" (UID: \"46305c26-487f-4187-a8ae-48fd2319d25a\") " pod="openstack/ceilometer-0" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.778947 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-dns-svc\") pod \"dnsmasq-dns-58dd9ff6bc-n7s6l\" (UID: \"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.778970 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-config\") pod \"dnsmasq-dns-58dd9ff6bc-n7s6l\" (UID: \"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.778991 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-ovsdbserver-sb\") pod \"dnsmasq-dns-58dd9ff6bc-n7s6l\" (UID: \"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.779012 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69v4f\" (UniqueName: \"kubernetes.io/projected/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-kube-api-access-69v4f\") pod \"dnsmasq-dns-58dd9ff6bc-n7s6l\" (UID: \"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.779034 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29-logs\") pod \"placement-db-sync-cl2c7\" (UID: \"ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29\") " pod="openstack/placement-db-sync-cl2c7" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.779066 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-ovsdbserver-nb\") pod \"dnsmasq-dns-58dd9ff6bc-n7s6l\" (UID: \"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.779087 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46305c26-487f-4187-a8ae-48fd2319d25a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"46305c26-487f-4187-a8ae-48fd2319d25a\") " pod="openstack/ceilometer-0" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.779102 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29-scripts\") pod \"placement-db-sync-cl2c7\" (UID: \"ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29\") " pod="openstack/placement-db-sync-cl2c7" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.779129 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/46305c26-487f-4187-a8ae-48fd2319d25a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"46305c26-487f-4187-a8ae-48fd2319d25a\") " pod="openstack/ceilometer-0" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.779150 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtqf5\" (UniqueName: \"kubernetes.io/projected/ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29-kube-api-access-xtqf5\") pod \"placement-db-sync-cl2c7\" (UID: \"ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29\") " pod="openstack/placement-db-sync-cl2c7" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.779193 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46305c26-487f-4187-a8ae-48fd2319d25a-log-httpd\") pod \"ceilometer-0\" (UID: \"46305c26-487f-4187-a8ae-48fd2319d25a\") " pod="openstack/ceilometer-0" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.779211 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-dns-swift-storage-0\") pod \"dnsmasq-dns-58dd9ff6bc-n7s6l\" (UID: \"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.779234 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29-config-data\") pod \"placement-db-sync-cl2c7\" (UID: \"ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29\") " pod="openstack/placement-db-sync-cl2c7" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.779259 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46305c26-487f-4187-a8ae-48fd2319d25a-run-httpd\") pod \"ceilometer-0\" (UID: \"46305c26-487f-4187-a8ae-48fd2319d25a\") " pod="openstack/ceilometer-0" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.779281 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46305c26-487f-4187-a8ae-48fd2319d25a-config-data\") pod \"ceilometer-0\" (UID: \"46305c26-487f-4187-a8ae-48fd2319d25a\") " pod="openstack/ceilometer-0" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.779313 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29-combined-ca-bundle\") pod \"placement-db-sync-cl2c7\" (UID: \"ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29\") " pod="openstack/placement-db-sync-cl2c7" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.788563 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29-combined-ca-bundle\") pod \"placement-db-sync-cl2c7\" (UID: \"ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29\") " pod="openstack/placement-db-sync-cl2c7" Feb 03 07:27:37 crc 
kubenswrapper[4708]: I0203 07:27:37.789758 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29-logs\") pod \"placement-db-sync-cl2c7\" (UID: \"ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29\") " pod="openstack/placement-db-sync-cl2c7" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.790770 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-ovsdbserver-sb\") pod \"dnsmasq-dns-58dd9ff6bc-n7s6l\" (UID: \"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.792006 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-config\") pod \"dnsmasq-dns-58dd9ff6bc-n7s6l\" (UID: \"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.792922 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-ovsdbserver-nb\") pod \"dnsmasq-dns-58dd9ff6bc-n7s6l\" (UID: \"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.793417 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46305c26-487f-4187-a8ae-48fd2319d25a-log-httpd\") pod \"ceilometer-0\" (UID: \"46305c26-487f-4187-a8ae-48fd2319d25a\") " pod="openstack/ceilometer-0" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.795980 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-dns-swift-storage-0\") pod \"dnsmasq-dns-58dd9ff6bc-n7s6l\" (UID: \"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.807554 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46305c26-487f-4187-a8ae-48fd2319d25a-run-httpd\") pod \"ceilometer-0\" (UID: \"46305c26-487f-4187-a8ae-48fd2319d25a\") " pod="openstack/ceilometer-0" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.807987 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-dns-svc\") pod \"dnsmasq-dns-58dd9ff6bc-n7s6l\" (UID: \"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.808858 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-db-create-ktbjr" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.809588 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29-scripts\") pod \"placement-db-sync-cl2c7\" (UID: \"ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29\") " pod="openstack/placement-db-sync-cl2c7" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.815346 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-rvq47"] Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.816772 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/46305c26-487f-4187-a8ae-48fd2319d25a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"46305c26-487f-4187-a8ae-48fd2319d25a\") " pod="openstack/ceilometer-0" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.825249 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46305c26-487f-4187-a8ae-48fd2319d25a-scripts\") pod \"ceilometer-0\" (UID: \"46305c26-487f-4187-a8ae-48fd2319d25a\") " pod="openstack/ceilometer-0" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.827914 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46305c26-487f-4187-a8ae-48fd2319d25a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"46305c26-487f-4187-a8ae-48fd2319d25a\") " pod="openstack/ceilometer-0" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.828254 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-2smzn" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.832734 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69v4f\" (UniqueName: \"kubernetes.io/projected/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-kube-api-access-69v4f\") pod \"dnsmasq-dns-58dd9ff6bc-n7s6l\" (UID: \"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.833507 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ws9bf\" (UniqueName: \"kubernetes.io/projected/46305c26-487f-4187-a8ae-48fd2319d25a-kube-api-access-ws9bf\") pod \"ceilometer-0\" (UID: \"46305c26-487f-4187-a8ae-48fd2319d25a\") " pod="openstack/ceilometer-0" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.838218 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29-config-data\") pod \"placement-db-sync-cl2c7\" (UID: \"ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29\") " pod="openstack/placement-db-sync-cl2c7" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.843581 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtqf5\" (UniqueName: \"kubernetes.io/projected/ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29-kube-api-access-xtqf5\") pod \"placement-db-sync-cl2c7\" (UID: \"ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29\") " pod="openstack/placement-db-sync-cl2c7" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.858003 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46305c26-487f-4187-a8ae-48fd2319d25a-config-data\") pod 
\"ceilometer-0\" (UID: \"46305c26-487f-4187-a8ae-48fd2319d25a\") " pod="openstack/ceilometer-0" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.888185 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-w75bv" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.899349 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.921269 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-cl2c7" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.934312 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" Feb 03 07:27:37 crc kubenswrapper[4708]: I0203 07:27:37.979611 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-cxbc2"] Feb 03 07:27:38 crc kubenswrapper[4708]: I0203 07:27:38.026227 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-7lhsc"] Feb 03 07:27:38 crc kubenswrapper[4708]: W0203 07:27:38.044152 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddcaf1aa4_0bde_49a7_a027_140450f08736.slice/crio-7e2ebc0528c8f635830dac451865fe849e068fa0b5a21a03f8c2d01a409c8c2d WatchSource:0}: Error finding container 7e2ebc0528c8f635830dac451865fe849e068fa0b5a21a03f8c2d01a409c8c2d: Status 404 returned error can't find the container with id 7e2ebc0528c8f635830dac451865fe849e068fa0b5a21a03f8c2d01a409c8c2d Feb 03 07:27:38 crc kubenswrapper[4708]: W0203 07:27:38.069111 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2af44247_24f0_4b7b_aac1_f431a0ba0eb2.slice/crio-8a4ff1d49ba063fe941e00a11a84f6eb58a29adf7fdf3f8d8a72dab153a75310 WatchSource:0}: Error finding container 8a4ff1d49ba063fe941e00a11a84f6eb58a29adf7fdf3f8d8a72dab153a75310: Status 404 returned error can't find the container with id 8a4ff1d49ba063fe941e00a11a84f6eb58a29adf7fdf3f8d8a72dab153a75310 Feb 03 07:27:38 crc kubenswrapper[4708]: I0203 07:27:38.129511 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8fa4198d-bbf3-4ce9-869d-bc182f9d33fa" path="/var/lib/kubelet/pods/8fa4198d-bbf3-4ce9-869d-bc182f9d33fa/volumes" Feb 03 07:27:38 crc kubenswrapper[4708]: I0203 07:27:38.309412 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-bbf1-account-create-update-5htsg"] Feb 03 07:27:38 crc kubenswrapper[4708]: I0203 07:27:38.327587 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-db-create-ktbjr"] Feb 03 07:27:38 crc kubenswrapper[4708]: I0203 07:27:38.735330 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-bbf1-account-create-update-5htsg" event={"ID":"efea8ba9-1f4b-4a03-aafd-8388de2f8bc0","Type":"ContainerStarted","Data":"67309d8772b73bcf4c84ee3b08b1136aed2d4364ce3570c344368ba8a1fdaf63"} Feb 03 07:27:38 crc kubenswrapper[4708]: I0203 07:27:38.741624 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-n7s6l"] Feb 03 07:27:38 crc kubenswrapper[4708]: W0203 07:27:38.742423 4708 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod21f50ebb_9d66_4b4b_bde7_b3e0e8057feb.slice/crio-9294024126ea6c2ad6b896e3924c5371eefe4b48eec5d8df8c6d802188c39216 WatchSource:0}: Error finding container 9294024126ea6c2ad6b896e3924c5371eefe4b48eec5d8df8c6d802188c39216: Status 404 returned error can't find the container with id 9294024126ea6c2ad6b896e3924c5371eefe4b48eec5d8df8c6d802188c39216 Feb 03 07:27:38 crc kubenswrapper[4708]: I0203 07:27:38.743209 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7lhsc" event={"ID":"2af44247-24f0-4b7b-aac1-f431a0ba0eb2","Type":"ContainerStarted","Data":"94fb1a6d62db6177569169006262b4aa64eb823c98086daf1735d3a05ebabe81"} Feb 03 07:27:38 crc kubenswrapper[4708]: I0203 07:27:38.743253 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7lhsc" event={"ID":"2af44247-24f0-4b7b-aac1-f431a0ba0eb2","Type":"ContainerStarted","Data":"8a4ff1d49ba063fe941e00a11a84f6eb58a29adf7fdf3f8d8a72dab153a75310"} Feb 03 07:27:38 crc kubenswrapper[4708]: I0203 07:27:38.746094 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-db-create-ktbjr" event={"ID":"4c0d871a-3a87-4651-8392-e69acf628940","Type":"ContainerStarted","Data":"d47be63d5cc91fe7a51f8fc6cf8b7e4c93c6f026780a752ca254c8ed9c9ff180"} Feb 03 07:27:38 crc kubenswrapper[4708]: I0203 07:27:38.748205 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5959f8865f-rvq47" event={"ID":"714caa63-0e8e-4d53-addd-11092a5b68c3","Type":"ContainerStarted","Data":"dc0d4736890cd89e50aaa3f257682eed2aaeee5491c7a45ee7e1214ab10ae54b"} Feb 03 07:27:38 crc kubenswrapper[4708]: I0203 07:27:38.748242 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5959f8865f-rvq47" event={"ID":"714caa63-0e8e-4d53-addd-11092a5b68c3","Type":"ContainerStarted","Data":"b292ab2c788be0d8d2d9cf90b06317eab213a4b010fbdf0ea692c22e1e783183"} Feb 03 07:27:38 crc kubenswrapper[4708]: I0203 07:27:38.757437 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-2smzn"] Feb 03 07:27:38 crc kubenswrapper[4708]: I0203 07:27:38.761986 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-764c5664d7-gjbwv" podUID="1a9d62cd-7eb6-4c16-ab2e-3051620d2650" containerName="dnsmasq-dns" containerID="cri-o://08b500d7d8e54f3d56a9ef32b69f3f3f8a5f1fa410ff74f2965c27140bffe4df" gracePeriod=10 Feb 03 07:27:38 crc kubenswrapper[4708]: I0203 07:27:38.762282 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-cxbc2" event={"ID":"dcaf1aa4-0bde-49a7-a027-140450f08736","Type":"ContainerStarted","Data":"efd0cc66c29a29935ef515388230d893b8545aede61375169757b998a1a0416c"} Feb 03 07:27:38 crc kubenswrapper[4708]: I0203 07:27:38.762310 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-cxbc2" event={"ID":"dcaf1aa4-0bde-49a7-a027-140450f08736","Type":"ContainerStarted","Data":"7e2ebc0528c8f635830dac451865fe849e068fa0b5a21a03f8c2d01a409c8c2d"} Feb 03 07:27:38 crc kubenswrapper[4708]: I0203 07:27:38.806187 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-cl2c7"] Feb 03 07:27:38 crc kubenswrapper[4708]: I0203 07:27:38.883079 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:27:38 crc kubenswrapper[4708]: W0203 07:27:38.885782 4708 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod46305c26_487f_4187_a8ae_48fd2319d25a.slice/crio-12cce954c343dca2cd354edb3d7ac900ee66c854d07cc95ee45749ed5bc05f77 WatchSource:0}: Error finding container 12cce954c343dca2cd354edb3d7ac900ee66c854d07cc95ee45749ed5bc05f77: Status 404 returned error can't find the container with id 12cce954c343dca2cd354edb3d7ac900ee66c854d07cc95ee45749ed5bc05f77 Feb 03 07:27:38 crc kubenswrapper[4708]: I0203 07:27:38.931357 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-w75bv"] Feb 03 07:27:39 crc kubenswrapper[4708]: I0203 07:27:39.363717 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:27:39 crc kubenswrapper[4708]: I0203 07:27:39.771708 4708 generic.go:334] "Generic (PLEG): container finished" podID="714caa63-0e8e-4d53-addd-11092a5b68c3" containerID="dc0d4736890cd89e50aaa3f257682eed2aaeee5491c7a45ee7e1214ab10ae54b" exitCode=0 Feb 03 07:27:39 crc kubenswrapper[4708]: I0203 07:27:39.771821 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5959f8865f-rvq47" event={"ID":"714caa63-0e8e-4d53-addd-11092a5b68c3","Type":"ContainerDied","Data":"dc0d4736890cd89e50aaa3f257682eed2aaeee5491c7a45ee7e1214ab10ae54b"} Feb 03 07:27:39 crc kubenswrapper[4708]: I0203 07:27:39.776444 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-w75bv" event={"ID":"fde8edd5-50e0-4bb0-8701-54e0998444a1","Type":"ContainerStarted","Data":"6077bdda0c5e511ea0097263c84f0cb5a7e6f035b8d609e351a8b4d00aa59987"} Feb 03 07:27:39 crc kubenswrapper[4708]: I0203 07:27:39.778185 4708 generic.go:334] "Generic (PLEG): container finished" podID="1a9d62cd-7eb6-4c16-ab2e-3051620d2650" containerID="08b500d7d8e54f3d56a9ef32b69f3f3f8a5f1fa410ff74f2965c27140bffe4df" exitCode=0 Feb 03 07:27:39 crc kubenswrapper[4708]: I0203 07:27:39.778231 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-gjbwv" event={"ID":"1a9d62cd-7eb6-4c16-ab2e-3051620d2650","Type":"ContainerDied","Data":"08b500d7d8e54f3d56a9ef32b69f3f3f8a5f1fa410ff74f2965c27140bffe4df"} Feb 03 07:27:39 crc kubenswrapper[4708]: I0203 07:27:39.779633 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-bbf1-account-create-update-5htsg" event={"ID":"efea8ba9-1f4b-4a03-aafd-8388de2f8bc0","Type":"ContainerStarted","Data":"80d477dbaa1060a84fd0e7ac381dbbcd5f194e8f203da36a548b95de6f0cce3f"} Feb 03 07:27:39 crc kubenswrapper[4708]: I0203 07:27:39.799010 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-db-create-ktbjr" event={"ID":"4c0d871a-3a87-4651-8392-e69acf628940","Type":"ContainerStarted","Data":"004112adfeeb812b356b4367486306021c116a343bd149394818843e76cea9ea"} Feb 03 07:27:39 crc kubenswrapper[4708]: I0203 07:27:39.808074 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" event={"ID":"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb","Type":"ContainerStarted","Data":"791324fb13eabbddd2baf9744b59a7937725c75a27bd96bcd6d8f15d6ca60319"} Feb 03 07:27:39 crc kubenswrapper[4708]: I0203 07:27:39.808122 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" event={"ID":"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb","Type":"ContainerStarted","Data":"9294024126ea6c2ad6b896e3924c5371eefe4b48eec5d8df8c6d802188c39216"} Feb 03 07:27:39 crc kubenswrapper[4708]: I0203 07:27:39.813108 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/placement-db-sync-cl2c7" event={"ID":"ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29","Type":"ContainerStarted","Data":"83f79bbb97d9dbc4caf3d455587c8dbe20dcacaacdac023418218e23e6763f85"} Feb 03 07:27:39 crc kubenswrapper[4708]: I0203 07:27:39.822459 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-2smzn" event={"ID":"1a3122ca-fa36-4033-a44f-36d12d0b3f4a","Type":"ContainerStarted","Data":"0131735411e070d57ab066ad86f53664b0cbbfe1d4e9e894851a40541ea676b3"} Feb 03 07:27:39 crc kubenswrapper[4708]: I0203 07:27:39.827557 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"46305c26-487f-4187-a8ae-48fd2319d25a","Type":"ContainerStarted","Data":"12cce954c343dca2cd354edb3d7ac900ee66c854d07cc95ee45749ed5bc05f77"} Feb 03 07:27:39 crc kubenswrapper[4708]: I0203 07:27:39.854587 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ironic-bbf1-account-create-update-5htsg" podStartSLOduration=2.854563774 podStartE2EDuration="2.854563774s" podCreationTimestamp="2026-02-03 07:27:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:27:39.806499698 +0000 UTC m=+1038.788446505" watchObservedRunningTime="2026-02-03 07:27:39.854563774 +0000 UTC m=+1038.836510581" Feb 03 07:27:39 crc kubenswrapper[4708]: I0203 07:27:39.877098 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ironic-db-create-ktbjr" podStartSLOduration=2.8770759249999998 podStartE2EDuration="2.877075925s" podCreationTimestamp="2026-02-03 07:27:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:27:39.822914689 +0000 UTC m=+1038.804861496" watchObservedRunningTime="2026-02-03 07:27:39.877075925 +0000 UTC m=+1038.859022732" Feb 03 07:27:39 crc kubenswrapper[4708]: I0203 07:27:39.893748 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-cxbc2" podStartSLOduration=2.893730463 podStartE2EDuration="2.893730463s" podCreationTimestamp="2026-02-03 07:27:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:27:39.860016818 +0000 UTC m=+1038.841963625" watchObservedRunningTime="2026-02-03 07:27:39.893730463 +0000 UTC m=+1038.875677270" Feb 03 07:27:39 crc kubenswrapper[4708]: I0203 07:27:39.898507 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-7lhsc" podStartSLOduration=3.8984942289999998 podStartE2EDuration="3.898494229s" podCreationTimestamp="2026-02-03 07:27:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:27:39.876330258 +0000 UTC m=+1038.858277065" watchObservedRunningTime="2026-02-03 07:27:39.898494229 +0000 UTC m=+1038.880441036" Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.065348 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-gjbwv" Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.138839 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-config\") pod \"1a9d62cd-7eb6-4c16-ab2e-3051620d2650\" (UID: \"1a9d62cd-7eb6-4c16-ab2e-3051620d2650\") " Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.139173 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-dns-svc\") pod \"1a9d62cd-7eb6-4c16-ab2e-3051620d2650\" (UID: \"1a9d62cd-7eb6-4c16-ab2e-3051620d2650\") " Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.139652 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-ovsdbserver-nb\") pod \"1a9d62cd-7eb6-4c16-ab2e-3051620d2650\" (UID: \"1a9d62cd-7eb6-4c16-ab2e-3051620d2650\") " Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.139691 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-dns-swift-storage-0\") pod \"1a9d62cd-7eb6-4c16-ab2e-3051620d2650\" (UID: \"1a9d62cd-7eb6-4c16-ab2e-3051620d2650\") " Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.139731 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kjwsf\" (UniqueName: \"kubernetes.io/projected/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-kube-api-access-kjwsf\") pod \"1a9d62cd-7eb6-4c16-ab2e-3051620d2650\" (UID: \"1a9d62cd-7eb6-4c16-ab2e-3051620d2650\") " Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.139774 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-ovsdbserver-sb\") pod \"1a9d62cd-7eb6-4c16-ab2e-3051620d2650\" (UID: \"1a9d62cd-7eb6-4c16-ab2e-3051620d2650\") " Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.152934 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-kube-api-access-kjwsf" (OuterVolumeSpecName: "kube-api-access-kjwsf") pod "1a9d62cd-7eb6-4c16-ab2e-3051620d2650" (UID: "1a9d62cd-7eb6-4c16-ab2e-3051620d2650"). InnerVolumeSpecName "kube-api-access-kjwsf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.193619 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "1a9d62cd-7eb6-4c16-ab2e-3051620d2650" (UID: "1a9d62cd-7eb6-4c16-ab2e-3051620d2650"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.200547 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1a9d62cd-7eb6-4c16-ab2e-3051620d2650" (UID: "1a9d62cd-7eb6-4c16-ab2e-3051620d2650"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.203408 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1a9d62cd-7eb6-4c16-ab2e-3051620d2650" (UID: "1a9d62cd-7eb6-4c16-ab2e-3051620d2650"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.232275 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-config" (OuterVolumeSpecName: "config") pod "1a9d62cd-7eb6-4c16-ab2e-3051620d2650" (UID: "1a9d62cd-7eb6-4c16-ab2e-3051620d2650"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.243951 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.243981 4708 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.243992 4708 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.244002 4708 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.244012 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kjwsf\" (UniqueName: \"kubernetes.io/projected/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-kube-api-access-kjwsf\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.247382 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1a9d62cd-7eb6-4c16-ab2e-3051620d2650" (UID: "1a9d62cd-7eb6-4c16-ab2e-3051620d2650"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.262189 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-rvq47" Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.345270 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/714caa63-0e8e-4d53-addd-11092a5b68c3-config\") pod \"714caa63-0e8e-4d53-addd-11092a5b68c3\" (UID: \"714caa63-0e8e-4d53-addd-11092a5b68c3\") " Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.345325 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/714caa63-0e8e-4d53-addd-11092a5b68c3-ovsdbserver-nb\") pod \"714caa63-0e8e-4d53-addd-11092a5b68c3\" (UID: \"714caa63-0e8e-4d53-addd-11092a5b68c3\") " Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.345429 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/714caa63-0e8e-4d53-addd-11092a5b68c3-dns-svc\") pod \"714caa63-0e8e-4d53-addd-11092a5b68c3\" (UID: \"714caa63-0e8e-4d53-addd-11092a5b68c3\") " Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.345484 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/714caa63-0e8e-4d53-addd-11092a5b68c3-ovsdbserver-sb\") pod \"714caa63-0e8e-4d53-addd-11092a5b68c3\" (UID: \"714caa63-0e8e-4d53-addd-11092a5b68c3\") " Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.345500 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/714caa63-0e8e-4d53-addd-11092a5b68c3-dns-swift-storage-0\") pod \"714caa63-0e8e-4d53-addd-11092a5b68c3\" (UID: \"714caa63-0e8e-4d53-addd-11092a5b68c3\") " Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.345537 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7dk6\" (UniqueName: \"kubernetes.io/projected/714caa63-0e8e-4d53-addd-11092a5b68c3-kube-api-access-h7dk6\") pod \"714caa63-0e8e-4d53-addd-11092a5b68c3\" (UID: \"714caa63-0e8e-4d53-addd-11092a5b68c3\") " Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.345813 4708 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a9d62cd-7eb6-4c16-ab2e-3051620d2650-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.349104 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/714caa63-0e8e-4d53-addd-11092a5b68c3-kube-api-access-h7dk6" (OuterVolumeSpecName: "kube-api-access-h7dk6") pod "714caa63-0e8e-4d53-addd-11092a5b68c3" (UID: "714caa63-0e8e-4d53-addd-11092a5b68c3"). InnerVolumeSpecName "kube-api-access-h7dk6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.366570 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/714caa63-0e8e-4d53-addd-11092a5b68c3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "714caa63-0e8e-4d53-addd-11092a5b68c3" (UID: "714caa63-0e8e-4d53-addd-11092a5b68c3"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.369002 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/714caa63-0e8e-4d53-addd-11092a5b68c3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "714caa63-0e8e-4d53-addd-11092a5b68c3" (UID: "714caa63-0e8e-4d53-addd-11092a5b68c3"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.372466 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/714caa63-0e8e-4d53-addd-11092a5b68c3-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "714caa63-0e8e-4d53-addd-11092a5b68c3" (UID: "714caa63-0e8e-4d53-addd-11092a5b68c3"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.377094 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/714caa63-0e8e-4d53-addd-11092a5b68c3-config" (OuterVolumeSpecName: "config") pod "714caa63-0e8e-4d53-addd-11092a5b68c3" (UID: "714caa63-0e8e-4d53-addd-11092a5b68c3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.377577 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/714caa63-0e8e-4d53-addd-11092a5b68c3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "714caa63-0e8e-4d53-addd-11092a5b68c3" (UID: "714caa63-0e8e-4d53-addd-11092a5b68c3"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.448001 4708 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/714caa63-0e8e-4d53-addd-11092a5b68c3-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.448255 4708 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/714caa63-0e8e-4d53-addd-11092a5b68c3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.448325 4708 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/714caa63-0e8e-4d53-addd-11092a5b68c3-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.448386 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7dk6\" (UniqueName: \"kubernetes.io/projected/714caa63-0e8e-4d53-addd-11092a5b68c3-kube-api-access-h7dk6\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.448449 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/714caa63-0e8e-4d53-addd-11092a5b68c3-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.448513 4708 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/714caa63-0e8e-4d53-addd-11092a5b68c3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.840076 4708 generic.go:334] "Generic (PLEG): container finished" 
podID="21f50ebb-9d66-4b4b-bde7-b3e0e8057feb" containerID="791324fb13eabbddd2baf9744b59a7937725c75a27bd96bcd6d8f15d6ca60319" exitCode=0 Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.840126 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" event={"ID":"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb","Type":"ContainerDied","Data":"791324fb13eabbddd2baf9744b59a7937725c75a27bd96bcd6d8f15d6ca60319"} Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.845568 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-gjbwv" event={"ID":"1a9d62cd-7eb6-4c16-ab2e-3051620d2650","Type":"ContainerDied","Data":"e4ec1201f8aa9beefe4f04f8d8ae9440f6e532bad66f325a3c2fdab79faad2bf"} Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.845615 4708 scope.go:117] "RemoveContainer" containerID="08b500d7d8e54f3d56a9ef32b69f3f3f8a5f1fa410ff74f2965c27140bffe4df" Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.845733 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-gjbwv" Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.858872 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-rvq47" Feb 03 07:27:40 crc kubenswrapper[4708]: I0203 07:27:40.860349 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5959f8865f-rvq47" event={"ID":"714caa63-0e8e-4d53-addd-11092a5b68c3","Type":"ContainerDied","Data":"b292ab2c788be0d8d2d9cf90b06317eab213a4b010fbdf0ea692c22e1e783183"} Feb 03 07:27:41 crc kubenswrapper[4708]: I0203 07:27:41.045673 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-gjbwv"] Feb 03 07:27:41 crc kubenswrapper[4708]: I0203 07:27:41.053031 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-gjbwv"] Feb 03 07:27:41 crc kubenswrapper[4708]: I0203 07:27:41.084514 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-rvq47"] Feb 03 07:27:41 crc kubenswrapper[4708]: I0203 07:27:41.095141 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-rvq47"] Feb 03 07:27:42 crc kubenswrapper[4708]: I0203 07:27:42.103487 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a9d62cd-7eb6-4c16-ab2e-3051620d2650" path="/var/lib/kubelet/pods/1a9d62cd-7eb6-4c16-ab2e-3051620d2650/volumes" Feb 03 07:27:42 crc kubenswrapper[4708]: I0203 07:27:42.105684 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="714caa63-0e8e-4d53-addd-11092a5b68c3" path="/var/lib/kubelet/pods/714caa63-0e8e-4d53-addd-11092a5b68c3/volumes" Feb 03 07:27:42 crc kubenswrapper[4708]: I0203 07:27:42.875584 4708 generic.go:334] "Generic (PLEG): container finished" podID="4c0d871a-3a87-4651-8392-e69acf628940" containerID="004112adfeeb812b356b4367486306021c116a343bd149394818843e76cea9ea" exitCode=0 Feb 03 07:27:42 crc kubenswrapper[4708]: I0203 07:27:42.875635 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-db-create-ktbjr" event={"ID":"4c0d871a-3a87-4651-8392-e69acf628940","Type":"ContainerDied","Data":"004112adfeeb812b356b4367486306021c116a343bd149394818843e76cea9ea"} Feb 03 07:27:43 crc kubenswrapper[4708]: I0203 07:27:43.398659 4708 scope.go:117] "RemoveContainer" containerID="a29f8a0ca7da0417c42d2598f4814b8bd972907ab59161087a7b6297ab3573a8" Feb 03 07:27:43 crc 
kubenswrapper[4708]: I0203 07:27:43.423862 4708 scope.go:117] "RemoveContainer" containerID="dc0d4736890cd89e50aaa3f257682eed2aaeee5491c7a45ee7e1214ab10ae54b" Feb 03 07:27:43 crc kubenswrapper[4708]: I0203 07:27:43.886486 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" event={"ID":"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb","Type":"ContainerStarted","Data":"3de21f52324ce1921b8558e8eaf43983881da531f4a9bdedd6daf119fa85efff"} Feb 03 07:27:44 crc kubenswrapper[4708]: I0203 07:27:44.929645 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-bbf1-account-create-update-5htsg" event={"ID":"efea8ba9-1f4b-4a03-aafd-8388de2f8bc0","Type":"ContainerDied","Data":"80d477dbaa1060a84fd0e7ac381dbbcd5f194e8f203da36a548b95de6f0cce3f"} Feb 03 07:27:44 crc kubenswrapper[4708]: I0203 07:27:44.929662 4708 generic.go:334] "Generic (PLEG): container finished" podID="efea8ba9-1f4b-4a03-aafd-8388de2f8bc0" containerID="80d477dbaa1060a84fd0e7ac381dbbcd5f194e8f203da36a548b95de6f0cce3f" exitCode=0 Feb 03 07:27:44 crc kubenswrapper[4708]: I0203 07:27:44.932013 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" Feb 03 07:27:44 crc kubenswrapper[4708]: I0203 07:27:44.958904 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" podStartSLOduration=7.958864072 podStartE2EDuration="7.958864072s" podCreationTimestamp="2026-02-03 07:27:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:27:44.950170239 +0000 UTC m=+1043.932117046" watchObservedRunningTime="2026-02-03 07:27:44.958864072 +0000 UTC m=+1043.940810879" Feb 03 07:27:45 crc kubenswrapper[4708]: I0203 07:27:45.942713 4708 generic.go:334] "Generic (PLEG): container finished" podID="2af44247-24f0-4b7b-aac1-f431a0ba0eb2" containerID="94fb1a6d62db6177569169006262b4aa64eb823c98086daf1735d3a05ebabe81" exitCode=0 Feb 03 07:27:45 crc kubenswrapper[4708]: I0203 07:27:45.942816 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7lhsc" event={"ID":"2af44247-24f0-4b7b-aac1-f431a0ba0eb2","Type":"ContainerDied","Data":"94fb1a6d62db6177569169006262b4aa64eb823c98086daf1735d3a05ebabe81"} Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.618061 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-db-create-ktbjr" Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.634253 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-bbf1-account-create-update-5htsg" Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.638101 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-7lhsc" Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.710282 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-config-data\") pod \"2af44247-24f0-4b7b-aac1-f431a0ba0eb2\" (UID: \"2af44247-24f0-4b7b-aac1-f431a0ba0eb2\") " Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.710366 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-scripts\") pod \"2af44247-24f0-4b7b-aac1-f431a0ba0eb2\" (UID: \"2af44247-24f0-4b7b-aac1-f431a0ba0eb2\") " Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.710410 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2pwrw\" (UniqueName: \"kubernetes.io/projected/4c0d871a-3a87-4651-8392-e69acf628940-kube-api-access-2pwrw\") pod \"4c0d871a-3a87-4651-8392-e69acf628940\" (UID: \"4c0d871a-3a87-4651-8392-e69acf628940\") " Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.710426 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-credential-keys\") pod \"2af44247-24f0-4b7b-aac1-f431a0ba0eb2\" (UID: \"2af44247-24f0-4b7b-aac1-f431a0ba0eb2\") " Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.710444 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-combined-ca-bundle\") pod \"2af44247-24f0-4b7b-aac1-f431a0ba0eb2\" (UID: \"2af44247-24f0-4b7b-aac1-f431a0ba0eb2\") " Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.710523 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-848wd\" (UniqueName: \"kubernetes.io/projected/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-kube-api-access-848wd\") pod \"2af44247-24f0-4b7b-aac1-f431a0ba0eb2\" (UID: \"2af44247-24f0-4b7b-aac1-f431a0ba0eb2\") " Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.710571 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7rn9f\" (UniqueName: \"kubernetes.io/projected/efea8ba9-1f4b-4a03-aafd-8388de2f8bc0-kube-api-access-7rn9f\") pod \"efea8ba9-1f4b-4a03-aafd-8388de2f8bc0\" (UID: \"efea8ba9-1f4b-4a03-aafd-8388de2f8bc0\") " Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.710597 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c0d871a-3a87-4651-8392-e69acf628940-operator-scripts\") pod \"4c0d871a-3a87-4651-8392-e69acf628940\" (UID: \"4c0d871a-3a87-4651-8392-e69acf628940\") " Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.710672 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/efea8ba9-1f4b-4a03-aafd-8388de2f8bc0-operator-scripts\") pod \"efea8ba9-1f4b-4a03-aafd-8388de2f8bc0\" (UID: \"efea8ba9-1f4b-4a03-aafd-8388de2f8bc0\") " Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.710740 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-fernet-keys\") pod 
\"2af44247-24f0-4b7b-aac1-f431a0ba0eb2\" (UID: \"2af44247-24f0-4b7b-aac1-f431a0ba0eb2\") " Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.711555 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c0d871a-3a87-4651-8392-e69acf628940-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4c0d871a-3a87-4651-8392-e69acf628940" (UID: "4c0d871a-3a87-4651-8392-e69acf628940"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.711556 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/efea8ba9-1f4b-4a03-aafd-8388de2f8bc0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "efea8ba9-1f4b-4a03-aafd-8388de2f8bc0" (UID: "efea8ba9-1f4b-4a03-aafd-8388de2f8bc0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.717166 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "2af44247-24f0-4b7b-aac1-f431a0ba0eb2" (UID: "2af44247-24f0-4b7b-aac1-f431a0ba0eb2"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.717404 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "2af44247-24f0-4b7b-aac1-f431a0ba0eb2" (UID: "2af44247-24f0-4b7b-aac1-f431a0ba0eb2"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.717620 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c0d871a-3a87-4651-8392-e69acf628940-kube-api-access-2pwrw" (OuterVolumeSpecName: "kube-api-access-2pwrw") pod "4c0d871a-3a87-4651-8392-e69acf628940" (UID: "4c0d871a-3a87-4651-8392-e69acf628940"). InnerVolumeSpecName "kube-api-access-2pwrw". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.717748 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-scripts" (OuterVolumeSpecName: "scripts") pod "2af44247-24f0-4b7b-aac1-f431a0ba0eb2" (UID: "2af44247-24f0-4b7b-aac1-f431a0ba0eb2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.720001 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efea8ba9-1f4b-4a03-aafd-8388de2f8bc0-kube-api-access-7rn9f" (OuterVolumeSpecName: "kube-api-access-7rn9f") pod "efea8ba9-1f4b-4a03-aafd-8388de2f8bc0" (UID: "efea8ba9-1f4b-4a03-aafd-8388de2f8bc0"). InnerVolumeSpecName "kube-api-access-7rn9f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.729765 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-kube-api-access-848wd" (OuterVolumeSpecName: "kube-api-access-848wd") pod "2af44247-24f0-4b7b-aac1-f431a0ba0eb2" (UID: "2af44247-24f0-4b7b-aac1-f431a0ba0eb2"). InnerVolumeSpecName "kube-api-access-848wd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.749858 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-config-data" (OuterVolumeSpecName: "config-data") pod "2af44247-24f0-4b7b-aac1-f431a0ba0eb2" (UID: "2af44247-24f0-4b7b-aac1-f431a0ba0eb2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.756347 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2af44247-24f0-4b7b-aac1-f431a0ba0eb2" (UID: "2af44247-24f0-4b7b-aac1-f431a0ba0eb2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.812879 4708 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-fernet-keys\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.812921 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.812933 4708 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.812947 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2pwrw\" (UniqueName: \"kubernetes.io/projected/4c0d871a-3a87-4651-8392-e69acf628940-kube-api-access-2pwrw\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.812961 4708 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-credential-keys\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.812973 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.812986 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-848wd\" (UniqueName: \"kubernetes.io/projected/2af44247-24f0-4b7b-aac1-f431a0ba0eb2-kube-api-access-848wd\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.812997 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7rn9f\" (UniqueName: \"kubernetes.io/projected/efea8ba9-1f4b-4a03-aafd-8388de2f8bc0-kube-api-access-7rn9f\") on node \"crc\" DevicePath \"\"" Feb 
03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.813008 4708 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c0d871a-3a87-4651-8392-e69acf628940-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.813019 4708 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/efea8ba9-1f4b-4a03-aafd-8388de2f8bc0-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.970544 4708 generic.go:334] "Generic (PLEG): container finished" podID="6201bbf3-523c-4a64-9703-fb0adbc0955a" containerID="bc6a69b873ed3d6240e916a34664035c89da8030345790479c4daa44494dccfa" exitCode=0 Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.970637 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-t95nr" event={"ID":"6201bbf3-523c-4a64-9703-fb0adbc0955a","Type":"ContainerDied","Data":"bc6a69b873ed3d6240e916a34664035c89da8030345790479c4daa44494dccfa"} Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.971843 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-bbf1-account-create-update-5htsg" event={"ID":"efea8ba9-1f4b-4a03-aafd-8388de2f8bc0","Type":"ContainerDied","Data":"67309d8772b73bcf4c84ee3b08b1136aed2d4364ce3570c344368ba8a1fdaf63"} Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.971872 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="67309d8772b73bcf4c84ee3b08b1136aed2d4364ce3570c344368ba8a1fdaf63" Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.971924 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-bbf1-account-create-update-5htsg" Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.973546 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7lhsc" event={"ID":"2af44247-24f0-4b7b-aac1-f431a0ba0eb2","Type":"ContainerDied","Data":"8a4ff1d49ba063fe941e00a11a84f6eb58a29adf7fdf3f8d8a72dab153a75310"} Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.973580 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8a4ff1d49ba063fe941e00a11a84f6eb58a29adf7fdf3f8d8a72dab153a75310" Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.973640 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7lhsc" Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.975292 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-db-create-ktbjr" event={"ID":"4c0d871a-3a87-4651-8392-e69acf628940","Type":"ContainerDied","Data":"d47be63d5cc91fe7a51f8fc6cf8b7e4c93c6f026780a752ca254c8ed9c9ff180"} Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.975317 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d47be63d5cc91fe7a51f8fc6cf8b7e4c93c6f026780a752ca254c8ed9c9ff180" Feb 03 07:27:48 crc kubenswrapper[4708]: I0203 07:27:48.975352 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-db-create-ktbjr" Feb 03 07:27:49 crc kubenswrapper[4708]: I0203 07:27:49.720681 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-7lhsc"] Feb 03 07:27:49 crc kubenswrapper[4708]: I0203 07:27:49.728025 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-7lhsc"] Feb 03 07:27:49 crc kubenswrapper[4708]: I0203 07:27:49.837462 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-blhdl"] Feb 03 07:27:49 crc kubenswrapper[4708]: E0203 07:27:49.837874 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="714caa63-0e8e-4d53-addd-11092a5b68c3" containerName="init" Feb 03 07:27:49 crc kubenswrapper[4708]: I0203 07:27:49.837891 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="714caa63-0e8e-4d53-addd-11092a5b68c3" containerName="init" Feb 03 07:27:49 crc kubenswrapper[4708]: E0203 07:27:49.837920 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a9d62cd-7eb6-4c16-ab2e-3051620d2650" containerName="init" Feb 03 07:27:49 crc kubenswrapper[4708]: I0203 07:27:49.837927 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a9d62cd-7eb6-4c16-ab2e-3051620d2650" containerName="init" Feb 03 07:27:49 crc kubenswrapper[4708]: E0203 07:27:49.837943 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c0d871a-3a87-4651-8392-e69acf628940" containerName="mariadb-database-create" Feb 03 07:27:49 crc kubenswrapper[4708]: I0203 07:27:49.837952 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c0d871a-3a87-4651-8392-e69acf628940" containerName="mariadb-database-create" Feb 03 07:27:49 crc kubenswrapper[4708]: E0203 07:27:49.837963 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a9d62cd-7eb6-4c16-ab2e-3051620d2650" containerName="dnsmasq-dns" Feb 03 07:27:49 crc kubenswrapper[4708]: I0203 07:27:49.837971 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a9d62cd-7eb6-4c16-ab2e-3051620d2650" containerName="dnsmasq-dns" Feb 03 07:27:49 crc kubenswrapper[4708]: E0203 07:27:49.837987 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2af44247-24f0-4b7b-aac1-f431a0ba0eb2" containerName="keystone-bootstrap" Feb 03 07:27:49 crc kubenswrapper[4708]: I0203 07:27:49.837995 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="2af44247-24f0-4b7b-aac1-f431a0ba0eb2" containerName="keystone-bootstrap" Feb 03 07:27:49 crc kubenswrapper[4708]: E0203 07:27:49.839908 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efea8ba9-1f4b-4a03-aafd-8388de2f8bc0" containerName="mariadb-account-create-update" Feb 03 07:27:49 crc kubenswrapper[4708]: I0203 07:27:49.839926 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="efea8ba9-1f4b-4a03-aafd-8388de2f8bc0" containerName="mariadb-account-create-update" Feb 03 07:27:49 crc kubenswrapper[4708]: I0203 07:27:49.848475 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c0d871a-3a87-4651-8392-e69acf628940" containerName="mariadb-database-create" Feb 03 07:27:49 crc kubenswrapper[4708]: I0203 07:27:49.848584 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="2af44247-24f0-4b7b-aac1-f431a0ba0eb2" containerName="keystone-bootstrap" Feb 03 07:27:49 crc kubenswrapper[4708]: I0203 07:27:49.848603 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="714caa63-0e8e-4d53-addd-11092a5b68c3" containerName="init" Feb 03 07:27:49 crc 
kubenswrapper[4708]: I0203 07:27:49.848616 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="efea8ba9-1f4b-4a03-aafd-8388de2f8bc0" containerName="mariadb-account-create-update" Feb 03 07:27:49 crc kubenswrapper[4708]: I0203 07:27:49.848677 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a9d62cd-7eb6-4c16-ab2e-3051620d2650" containerName="dnsmasq-dns" Feb 03 07:27:49 crc kubenswrapper[4708]: I0203 07:27:49.849295 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-blhdl"] Feb 03 07:27:49 crc kubenswrapper[4708]: I0203 07:27:49.849389 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-blhdl" Feb 03 07:27:49 crc kubenswrapper[4708]: I0203 07:27:49.861147 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Feb 03 07:27:49 crc kubenswrapper[4708]: I0203 07:27:49.868291 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 03 07:27:49 crc kubenswrapper[4708]: I0203 07:27:49.868503 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-x5pvc" Feb 03 07:27:49 crc kubenswrapper[4708]: I0203 07:27:49.868508 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 03 07:27:49 crc kubenswrapper[4708]: I0203 07:27:49.868615 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 03 07:27:49 crc kubenswrapper[4708]: I0203 07:27:49.943707 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-config-data\") pod \"keystone-bootstrap-blhdl\" (UID: \"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d\") " pod="openstack/keystone-bootstrap-blhdl" Feb 03 07:27:49 crc kubenswrapper[4708]: I0203 07:27:49.943833 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wm84h\" (UniqueName: \"kubernetes.io/projected/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-kube-api-access-wm84h\") pod \"keystone-bootstrap-blhdl\" (UID: \"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d\") " pod="openstack/keystone-bootstrap-blhdl" Feb 03 07:27:49 crc kubenswrapper[4708]: I0203 07:27:49.943888 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-scripts\") pod \"keystone-bootstrap-blhdl\" (UID: \"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d\") " pod="openstack/keystone-bootstrap-blhdl" Feb 03 07:27:49 crc kubenswrapper[4708]: I0203 07:27:49.943958 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-fernet-keys\") pod \"keystone-bootstrap-blhdl\" (UID: \"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d\") " pod="openstack/keystone-bootstrap-blhdl" Feb 03 07:27:49 crc kubenswrapper[4708]: I0203 07:27:49.944086 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-credential-keys\") pod \"keystone-bootstrap-blhdl\" (UID: \"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d\") " pod="openstack/keystone-bootstrap-blhdl" Feb 03 07:27:49 crc 
kubenswrapper[4708]: I0203 07:27:49.944208 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-combined-ca-bundle\") pod \"keystone-bootstrap-blhdl\" (UID: \"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d\") " pod="openstack/keystone-bootstrap-blhdl" Feb 03 07:27:50 crc kubenswrapper[4708]: I0203 07:27:50.045739 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wm84h\" (UniqueName: \"kubernetes.io/projected/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-kube-api-access-wm84h\") pod \"keystone-bootstrap-blhdl\" (UID: \"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d\") " pod="openstack/keystone-bootstrap-blhdl" Feb 03 07:27:50 crc kubenswrapper[4708]: I0203 07:27:50.045837 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-scripts\") pod \"keystone-bootstrap-blhdl\" (UID: \"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d\") " pod="openstack/keystone-bootstrap-blhdl" Feb 03 07:27:50 crc kubenswrapper[4708]: I0203 07:27:50.045894 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-fernet-keys\") pod \"keystone-bootstrap-blhdl\" (UID: \"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d\") " pod="openstack/keystone-bootstrap-blhdl" Feb 03 07:27:50 crc kubenswrapper[4708]: I0203 07:27:50.045954 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-credential-keys\") pod \"keystone-bootstrap-blhdl\" (UID: \"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d\") " pod="openstack/keystone-bootstrap-blhdl" Feb 03 07:27:50 crc kubenswrapper[4708]: I0203 07:27:50.046013 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-combined-ca-bundle\") pod \"keystone-bootstrap-blhdl\" (UID: \"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d\") " pod="openstack/keystone-bootstrap-blhdl" Feb 03 07:27:50 crc kubenswrapper[4708]: I0203 07:27:50.046043 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-config-data\") pod \"keystone-bootstrap-blhdl\" (UID: \"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d\") " pod="openstack/keystone-bootstrap-blhdl" Feb 03 07:27:50 crc kubenswrapper[4708]: I0203 07:27:50.051983 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-fernet-keys\") pod \"keystone-bootstrap-blhdl\" (UID: \"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d\") " pod="openstack/keystone-bootstrap-blhdl" Feb 03 07:27:50 crc kubenswrapper[4708]: I0203 07:27:50.052488 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-config-data\") pod \"keystone-bootstrap-blhdl\" (UID: \"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d\") " pod="openstack/keystone-bootstrap-blhdl" Feb 03 07:27:50 crc kubenswrapper[4708]: I0203 07:27:50.053084 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-scripts\") pod \"keystone-bootstrap-blhdl\" (UID: \"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d\") " pod="openstack/keystone-bootstrap-blhdl" Feb 03 07:27:50 crc kubenswrapper[4708]: I0203 07:27:50.053186 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-combined-ca-bundle\") pod \"keystone-bootstrap-blhdl\" (UID: \"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d\") " pod="openstack/keystone-bootstrap-blhdl" Feb 03 07:27:50 crc kubenswrapper[4708]: I0203 07:27:50.053294 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-credential-keys\") pod \"keystone-bootstrap-blhdl\" (UID: \"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d\") " pod="openstack/keystone-bootstrap-blhdl" Feb 03 07:27:50 crc kubenswrapper[4708]: I0203 07:27:50.078680 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wm84h\" (UniqueName: \"kubernetes.io/projected/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-kube-api-access-wm84h\") pod \"keystone-bootstrap-blhdl\" (UID: \"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d\") " pod="openstack/keystone-bootstrap-blhdl" Feb 03 07:27:50 crc kubenswrapper[4708]: I0203 07:27:50.102650 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2af44247-24f0-4b7b-aac1-f431a0ba0eb2" path="/var/lib/kubelet/pods/2af44247-24f0-4b7b-aac1-f431a0ba0eb2/volumes" Feb 03 07:27:50 crc kubenswrapper[4708]: I0203 07:27:50.184734 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-blhdl" Feb 03 07:27:52 crc kubenswrapper[4708]: I0203 07:27:52.542730 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ironic-db-sync-8w6c7"] Feb 03 07:27:52 crc kubenswrapper[4708]: I0203 07:27:52.544902 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-db-sync-8w6c7" Feb 03 07:27:52 crc kubenswrapper[4708]: I0203 07:27:52.548568 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-config-data" Feb 03 07:27:52 crc kubenswrapper[4708]: I0203 07:27:52.548668 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-scripts" Feb 03 07:27:52 crc kubenswrapper[4708]: I0203 07:27:52.548752 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-ironic-dockercfg-5lpxl" Feb 03 07:27:52 crc kubenswrapper[4708]: I0203 07:27:52.555485 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-db-sync-8w6c7"] Feb 03 07:27:52 crc kubenswrapper[4708]: I0203 07:27:52.598712 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec5c8613-c88b-4cc5-8ad4-440e65523618-config-data\") pod \"ironic-db-sync-8w6c7\" (UID: \"ec5c8613-c88b-4cc5-8ad4-440e65523618\") " pod="openstack/ironic-db-sync-8w6c7" Feb 03 07:27:52 crc kubenswrapper[4708]: I0203 07:27:52.598773 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99r6w\" (UniqueName: \"kubernetes.io/projected/ec5c8613-c88b-4cc5-8ad4-440e65523618-kube-api-access-99r6w\") pod \"ironic-db-sync-8w6c7\" (UID: \"ec5c8613-c88b-4cc5-8ad4-440e65523618\") " pod="openstack/ironic-db-sync-8w6c7" Feb 03 07:27:52 crc kubenswrapper[4708]: I0203 07:27:52.598825 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/ec5c8613-c88b-4cc5-8ad4-440e65523618-etc-podinfo\") pod \"ironic-db-sync-8w6c7\" (UID: \"ec5c8613-c88b-4cc5-8ad4-440e65523618\") " pod="openstack/ironic-db-sync-8w6c7" Feb 03 07:27:52 crc kubenswrapper[4708]: I0203 07:27:52.598883 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec5c8613-c88b-4cc5-8ad4-440e65523618-scripts\") pod \"ironic-db-sync-8w6c7\" (UID: \"ec5c8613-c88b-4cc5-8ad4-440e65523618\") " pod="openstack/ironic-db-sync-8w6c7" Feb 03 07:27:52 crc kubenswrapper[4708]: I0203 07:27:52.598924 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/ec5c8613-c88b-4cc5-8ad4-440e65523618-config-data-merged\") pod \"ironic-db-sync-8w6c7\" (UID: \"ec5c8613-c88b-4cc5-8ad4-440e65523618\") " pod="openstack/ironic-db-sync-8w6c7" Feb 03 07:27:52 crc kubenswrapper[4708]: I0203 07:27:52.598939 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec5c8613-c88b-4cc5-8ad4-440e65523618-combined-ca-bundle\") pod \"ironic-db-sync-8w6c7\" (UID: \"ec5c8613-c88b-4cc5-8ad4-440e65523618\") " pod="openstack/ironic-db-sync-8w6c7" Feb 03 07:27:52 crc kubenswrapper[4708]: I0203 07:27:52.700533 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec5c8613-c88b-4cc5-8ad4-440e65523618-config-data\") pod \"ironic-db-sync-8w6c7\" (UID: \"ec5c8613-c88b-4cc5-8ad4-440e65523618\") " pod="openstack/ironic-db-sync-8w6c7" Feb 03 07:27:52 crc kubenswrapper[4708]: I0203 07:27:52.700606 4708 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-99r6w\" (UniqueName: \"kubernetes.io/projected/ec5c8613-c88b-4cc5-8ad4-440e65523618-kube-api-access-99r6w\") pod \"ironic-db-sync-8w6c7\" (UID: \"ec5c8613-c88b-4cc5-8ad4-440e65523618\") " pod="openstack/ironic-db-sync-8w6c7" Feb 03 07:27:52 crc kubenswrapper[4708]: I0203 07:27:52.700645 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/ec5c8613-c88b-4cc5-8ad4-440e65523618-etc-podinfo\") pod \"ironic-db-sync-8w6c7\" (UID: \"ec5c8613-c88b-4cc5-8ad4-440e65523618\") " pod="openstack/ironic-db-sync-8w6c7" Feb 03 07:27:52 crc kubenswrapper[4708]: I0203 07:27:52.700723 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec5c8613-c88b-4cc5-8ad4-440e65523618-scripts\") pod \"ironic-db-sync-8w6c7\" (UID: \"ec5c8613-c88b-4cc5-8ad4-440e65523618\") " pod="openstack/ironic-db-sync-8w6c7" Feb 03 07:27:52 crc kubenswrapper[4708]: I0203 07:27:52.700787 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/ec5c8613-c88b-4cc5-8ad4-440e65523618-config-data-merged\") pod \"ironic-db-sync-8w6c7\" (UID: \"ec5c8613-c88b-4cc5-8ad4-440e65523618\") " pod="openstack/ironic-db-sync-8w6c7" Feb 03 07:27:52 crc kubenswrapper[4708]: I0203 07:27:52.700825 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec5c8613-c88b-4cc5-8ad4-440e65523618-combined-ca-bundle\") pod \"ironic-db-sync-8w6c7\" (UID: \"ec5c8613-c88b-4cc5-8ad4-440e65523618\") " pod="openstack/ironic-db-sync-8w6c7" Feb 03 07:27:52 crc kubenswrapper[4708]: I0203 07:27:52.701316 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/ec5c8613-c88b-4cc5-8ad4-440e65523618-config-data-merged\") pod \"ironic-db-sync-8w6c7\" (UID: \"ec5c8613-c88b-4cc5-8ad4-440e65523618\") " pod="openstack/ironic-db-sync-8w6c7" Feb 03 07:27:52 crc kubenswrapper[4708]: I0203 07:27:52.705435 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec5c8613-c88b-4cc5-8ad4-440e65523618-scripts\") pod \"ironic-db-sync-8w6c7\" (UID: \"ec5c8613-c88b-4cc5-8ad4-440e65523618\") " pod="openstack/ironic-db-sync-8w6c7" Feb 03 07:27:52 crc kubenswrapper[4708]: I0203 07:27:52.705437 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/ec5c8613-c88b-4cc5-8ad4-440e65523618-etc-podinfo\") pod \"ironic-db-sync-8w6c7\" (UID: \"ec5c8613-c88b-4cc5-8ad4-440e65523618\") " pod="openstack/ironic-db-sync-8w6c7" Feb 03 07:27:52 crc kubenswrapper[4708]: I0203 07:27:52.706184 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec5c8613-c88b-4cc5-8ad4-440e65523618-config-data\") pod \"ironic-db-sync-8w6c7\" (UID: \"ec5c8613-c88b-4cc5-8ad4-440e65523618\") " pod="openstack/ironic-db-sync-8w6c7" Feb 03 07:27:52 crc kubenswrapper[4708]: I0203 07:27:52.706244 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec5c8613-c88b-4cc5-8ad4-440e65523618-combined-ca-bundle\") pod \"ironic-db-sync-8w6c7\" (UID: \"ec5c8613-c88b-4cc5-8ad4-440e65523618\") " 
pod="openstack/ironic-db-sync-8w6c7" Feb 03 07:27:52 crc kubenswrapper[4708]: I0203 07:27:52.732137 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99r6w\" (UniqueName: \"kubernetes.io/projected/ec5c8613-c88b-4cc5-8ad4-440e65523618-kube-api-access-99r6w\") pod \"ironic-db-sync-8w6c7\" (UID: \"ec5c8613-c88b-4cc5-8ad4-440e65523618\") " pod="openstack/ironic-db-sync-8w6c7" Feb 03 07:27:52 crc kubenswrapper[4708]: I0203 07:27:52.876782 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-db-sync-8w6c7" Feb 03 07:27:53 crc kubenswrapper[4708]: I0203 07:27:52.937398 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" Feb 03 07:27:53 crc kubenswrapper[4708]: I0203 07:27:53.019573 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-85cjv"] Feb 03 07:27:53 crc kubenswrapper[4708]: I0203 07:27:53.019837 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-85cjv" podUID="412c823d-20ba-42cb-9c05-70bee2ee89a2" containerName="dnsmasq-dns" containerID="cri-o://59d478c2fd90f7840a37dec3167cce1573203aa6de1841bd844c5d928490f0c2" gracePeriod=10 Feb 03 07:27:53 crc kubenswrapper[4708]: I0203 07:27:53.033253 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-t95nr" event={"ID":"6201bbf3-523c-4a64-9703-fb0adbc0955a","Type":"ContainerDied","Data":"dd6f5c040aab417cac2106e0ca5fe7032e28098452f8ce35cc1ac0cb1ea91f3c"} Feb 03 07:27:53 crc kubenswrapper[4708]: I0203 07:27:53.033294 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dd6f5c040aab417cac2106e0ca5fe7032e28098452f8ce35cc1ac0cb1ea91f3c" Feb 03 07:27:53 crc kubenswrapper[4708]: I0203 07:27:53.048070 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-t95nr" Feb 03 07:27:53 crc kubenswrapper[4708]: I0203 07:27:53.118727 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6201bbf3-523c-4a64-9703-fb0adbc0955a-config-data\") pod \"6201bbf3-523c-4a64-9703-fb0adbc0955a\" (UID: \"6201bbf3-523c-4a64-9703-fb0adbc0955a\") " Feb 03 07:27:53 crc kubenswrapper[4708]: I0203 07:27:53.119061 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6201bbf3-523c-4a64-9703-fb0adbc0955a-combined-ca-bundle\") pod \"6201bbf3-523c-4a64-9703-fb0adbc0955a\" (UID: \"6201bbf3-523c-4a64-9703-fb0adbc0955a\") " Feb 03 07:27:53 crc kubenswrapper[4708]: I0203 07:27:53.119117 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jsdhc\" (UniqueName: \"kubernetes.io/projected/6201bbf3-523c-4a64-9703-fb0adbc0955a-kube-api-access-jsdhc\") pod \"6201bbf3-523c-4a64-9703-fb0adbc0955a\" (UID: \"6201bbf3-523c-4a64-9703-fb0adbc0955a\") " Feb 03 07:27:53 crc kubenswrapper[4708]: I0203 07:27:53.119287 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6201bbf3-523c-4a64-9703-fb0adbc0955a-db-sync-config-data\") pod \"6201bbf3-523c-4a64-9703-fb0adbc0955a\" (UID: \"6201bbf3-523c-4a64-9703-fb0adbc0955a\") " Feb 03 07:27:53 crc kubenswrapper[4708]: I0203 07:27:53.124026 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6201bbf3-523c-4a64-9703-fb0adbc0955a-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "6201bbf3-523c-4a64-9703-fb0adbc0955a" (UID: "6201bbf3-523c-4a64-9703-fb0adbc0955a"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:27:53 crc kubenswrapper[4708]: I0203 07:27:53.124127 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6201bbf3-523c-4a64-9703-fb0adbc0955a-kube-api-access-jsdhc" (OuterVolumeSpecName: "kube-api-access-jsdhc") pod "6201bbf3-523c-4a64-9703-fb0adbc0955a" (UID: "6201bbf3-523c-4a64-9703-fb0adbc0955a"). InnerVolumeSpecName "kube-api-access-jsdhc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:27:53 crc kubenswrapper[4708]: I0203 07:27:53.159221 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6201bbf3-523c-4a64-9703-fb0adbc0955a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6201bbf3-523c-4a64-9703-fb0adbc0955a" (UID: "6201bbf3-523c-4a64-9703-fb0adbc0955a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:27:53 crc kubenswrapper[4708]: I0203 07:27:53.193608 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6201bbf3-523c-4a64-9703-fb0adbc0955a-config-data" (OuterVolumeSpecName: "config-data") pod "6201bbf3-523c-4a64-9703-fb0adbc0955a" (UID: "6201bbf3-523c-4a64-9703-fb0adbc0955a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:27:53 crc kubenswrapper[4708]: I0203 07:27:53.221686 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6201bbf3-523c-4a64-9703-fb0adbc0955a-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:53 crc kubenswrapper[4708]: I0203 07:27:53.221711 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6201bbf3-523c-4a64-9703-fb0adbc0955a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:53 crc kubenswrapper[4708]: I0203 07:27:53.221722 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jsdhc\" (UniqueName: \"kubernetes.io/projected/6201bbf3-523c-4a64-9703-fb0adbc0955a-kube-api-access-jsdhc\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:53 crc kubenswrapper[4708]: I0203 07:27:53.221733 4708 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6201bbf3-523c-4a64-9703-fb0adbc0955a-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:27:53 crc kubenswrapper[4708]: I0203 07:27:53.833513 4708 patch_prober.go:28] interesting pod/machine-config-daemon-r94bn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:27:53 crc kubenswrapper[4708]: I0203 07:27:53.833600 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:27:53 crc kubenswrapper[4708]: I0203 07:27:53.833657 4708 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" Feb 03 07:27:53 crc kubenswrapper[4708]: I0203 07:27:53.834463 4708 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1f83852e58fe0e7c3b1e3ce74595be18ac409f305b8edecdccc7efc4c0f59a4b"} pod="openshift-machine-config-operator/machine-config-daemon-r94bn" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 03 07:27:53 crc kubenswrapper[4708]: I0203 07:27:53.834549 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" containerID="cri-o://1f83852e58fe0e7c3b1e3ce74595be18ac409f305b8edecdccc7efc4c0f59a4b" gracePeriod=600 Feb 03 07:27:54 crc kubenswrapper[4708]: I0203 07:27:54.043208 4708 generic.go:334] "Generic (PLEG): container finished" podID="67498414-5132-496e-9638-189f5941ace0" containerID="1f83852e58fe0e7c3b1e3ce74595be18ac409f305b8edecdccc7efc4c0f59a4b" exitCode=0 Feb 03 07:27:54 crc kubenswrapper[4708]: I0203 07:27:54.043273 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" event={"ID":"67498414-5132-496e-9638-189f5941ace0","Type":"ContainerDied","Data":"1f83852e58fe0e7c3b1e3ce74595be18ac409f305b8edecdccc7efc4c0f59a4b"} Feb 03 07:27:54 crc kubenswrapper[4708]: 
I0203 07:27:54.045086 4708 scope.go:117] "RemoveContainer" containerID="85eaca543a1965c16dfd9764a0051f13446290670638b0dde7e65e129f02d68c" Feb 03 07:27:54 crc kubenswrapper[4708]: I0203 07:27:54.049845 4708 generic.go:334] "Generic (PLEG): container finished" podID="412c823d-20ba-42cb-9c05-70bee2ee89a2" containerID="59d478c2fd90f7840a37dec3167cce1573203aa6de1841bd844c5d928490f0c2" exitCode=0 Feb 03 07:27:54 crc kubenswrapper[4708]: I0203 07:27:54.049921 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-85cjv" event={"ID":"412c823d-20ba-42cb-9c05-70bee2ee89a2","Type":"ContainerDied","Data":"59d478c2fd90f7840a37dec3167cce1573203aa6de1841bd844c5d928490f0c2"} Feb 03 07:27:54 crc kubenswrapper[4708]: I0203 07:27:54.049953 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-t95nr" Feb 03 07:27:54 crc kubenswrapper[4708]: I0203 07:27:54.350435 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-hwghb"] Feb 03 07:27:54 crc kubenswrapper[4708]: E0203 07:27:54.356272 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6201bbf3-523c-4a64-9703-fb0adbc0955a" containerName="glance-db-sync" Feb 03 07:27:54 crc kubenswrapper[4708]: I0203 07:27:54.356304 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="6201bbf3-523c-4a64-9703-fb0adbc0955a" containerName="glance-db-sync" Feb 03 07:27:54 crc kubenswrapper[4708]: I0203 07:27:54.356489 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="6201bbf3-523c-4a64-9703-fb0adbc0955a" containerName="glance-db-sync" Feb 03 07:27:54 crc kubenswrapper[4708]: I0203 07:27:54.357327 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb" Feb 03 07:27:54 crc kubenswrapper[4708]: I0203 07:27:54.365633 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-hwghb"] Feb 03 07:27:54 crc kubenswrapper[4708]: I0203 07:27:54.440333 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-hwghb\" (UID: \"2739cdfe-549c-495a-ac6c-7f6cb96de9a4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb" Feb 03 07:27:54 crc kubenswrapper[4708]: I0203 07:27:54.440388 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-hwghb\" (UID: \"2739cdfe-549c-495a-ac6c-7f6cb96de9a4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb" Feb 03 07:27:54 crc kubenswrapper[4708]: I0203 07:27:54.440416 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-hwghb\" (UID: \"2739cdfe-549c-495a-ac6c-7f6cb96de9a4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb" Feb 03 07:27:54 crc kubenswrapper[4708]: I0203 07:27:54.440460 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-hwghb\" (UID: 
\"2739cdfe-549c-495a-ac6c-7f6cb96de9a4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb" Feb 03 07:27:54 crc kubenswrapper[4708]: I0203 07:27:54.440493 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-config\") pod \"dnsmasq-dns-785d8bcb8c-hwghb\" (UID: \"2739cdfe-549c-495a-ac6c-7f6cb96de9a4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb" Feb 03 07:27:54 crc kubenswrapper[4708]: I0203 07:27:54.440548 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hpjx5\" (UniqueName: \"kubernetes.io/projected/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-kube-api-access-hpjx5\") pod \"dnsmasq-dns-785d8bcb8c-hwghb\" (UID: \"2739cdfe-549c-495a-ac6c-7f6cb96de9a4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb" Feb 03 07:27:54 crc kubenswrapper[4708]: I0203 07:27:54.544429 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hpjx5\" (UniqueName: \"kubernetes.io/projected/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-kube-api-access-hpjx5\") pod \"dnsmasq-dns-785d8bcb8c-hwghb\" (UID: \"2739cdfe-549c-495a-ac6c-7f6cb96de9a4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb" Feb 03 07:27:54 crc kubenswrapper[4708]: I0203 07:27:54.544615 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-hwghb\" (UID: \"2739cdfe-549c-495a-ac6c-7f6cb96de9a4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb" Feb 03 07:27:54 crc kubenswrapper[4708]: I0203 07:27:54.544662 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-hwghb\" (UID: \"2739cdfe-549c-495a-ac6c-7f6cb96de9a4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb" Feb 03 07:27:54 crc kubenswrapper[4708]: I0203 07:27:54.544701 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-hwghb\" (UID: \"2739cdfe-549c-495a-ac6c-7f6cb96de9a4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb" Feb 03 07:27:54 crc kubenswrapper[4708]: I0203 07:27:54.544719 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-hwghb\" (UID: \"2739cdfe-549c-495a-ac6c-7f6cb96de9a4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb" Feb 03 07:27:54 crc kubenswrapper[4708]: I0203 07:27:54.544753 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-config\") pod \"dnsmasq-dns-785d8bcb8c-hwghb\" (UID: \"2739cdfe-549c-495a-ac6c-7f6cb96de9a4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb" Feb 03 07:27:54 crc kubenswrapper[4708]: I0203 07:27:54.563581 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-hwghb\" (UID: 
\"2739cdfe-549c-495a-ac6c-7f6cb96de9a4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb" Feb 03 07:27:54 crc kubenswrapper[4708]: I0203 07:27:54.563887 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-hwghb\" (UID: \"2739cdfe-549c-495a-ac6c-7f6cb96de9a4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb" Feb 03 07:27:54 crc kubenswrapper[4708]: I0203 07:27:54.564234 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-hwghb\" (UID: \"2739cdfe-549c-495a-ac6c-7f6cb96de9a4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb" Feb 03 07:27:54 crc kubenswrapper[4708]: I0203 07:27:54.568589 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-config\") pod \"dnsmasq-dns-785d8bcb8c-hwghb\" (UID: \"2739cdfe-549c-495a-ac6c-7f6cb96de9a4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb" Feb 03 07:27:54 crc kubenswrapper[4708]: I0203 07:27:54.569687 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-hwghb\" (UID: \"2739cdfe-549c-495a-ac6c-7f6cb96de9a4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb" Feb 03 07:27:54 crc kubenswrapper[4708]: I0203 07:27:54.569866 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hpjx5\" (UniqueName: \"kubernetes.io/projected/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-kube-api-access-hpjx5\") pod \"dnsmasq-dns-785d8bcb8c-hwghb\" (UID: \"2739cdfe-549c-495a-ac6c-7f6cb96de9a4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb" Feb 03 07:27:54 crc kubenswrapper[4708]: I0203 07:27:54.690772 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.407137 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.408816 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.411330 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.411566 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-rwpwp" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.411762 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.424896 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.458617 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7005a50-4e51-4074-8a02-d76857064103-config-data\") pod \"glance-default-external-api-0\" (UID: \"e7005a50-4e51-4074-8a02-d76857064103\") " pod="openstack/glance-default-external-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.458693 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7005a50-4e51-4074-8a02-d76857064103-scripts\") pod \"glance-default-external-api-0\" (UID: \"e7005a50-4e51-4074-8a02-d76857064103\") " pod="openstack/glance-default-external-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.458717 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8sblf\" (UniqueName: \"kubernetes.io/projected/e7005a50-4e51-4074-8a02-d76857064103-kube-api-access-8sblf\") pod \"glance-default-external-api-0\" (UID: \"e7005a50-4e51-4074-8a02-d76857064103\") " pod="openstack/glance-default-external-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.458742 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e7005a50-4e51-4074-8a02-d76857064103-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e7005a50-4e51-4074-8a02-d76857064103\") " pod="openstack/glance-default-external-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.458833 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7005a50-4e51-4074-8a02-d76857064103-logs\") pod \"glance-default-external-api-0\" (UID: \"e7005a50-4e51-4074-8a02-d76857064103\") " pod="openstack/glance-default-external-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.458853 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"e7005a50-4e51-4074-8a02-d76857064103\") " pod="openstack/glance-default-external-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.458886 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7005a50-4e51-4074-8a02-d76857064103-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e7005a50-4e51-4074-8a02-d76857064103\") " 
pod="openstack/glance-default-external-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.560078 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7005a50-4e51-4074-8a02-d76857064103-scripts\") pod \"glance-default-external-api-0\" (UID: \"e7005a50-4e51-4074-8a02-d76857064103\") " pod="openstack/glance-default-external-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.560136 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8sblf\" (UniqueName: \"kubernetes.io/projected/e7005a50-4e51-4074-8a02-d76857064103-kube-api-access-8sblf\") pod \"glance-default-external-api-0\" (UID: \"e7005a50-4e51-4074-8a02-d76857064103\") " pod="openstack/glance-default-external-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.560160 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e7005a50-4e51-4074-8a02-d76857064103-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e7005a50-4e51-4074-8a02-d76857064103\") " pod="openstack/glance-default-external-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.560219 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7005a50-4e51-4074-8a02-d76857064103-logs\") pod \"glance-default-external-api-0\" (UID: \"e7005a50-4e51-4074-8a02-d76857064103\") " pod="openstack/glance-default-external-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.560239 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"e7005a50-4e51-4074-8a02-d76857064103\") " pod="openstack/glance-default-external-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.560274 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7005a50-4e51-4074-8a02-d76857064103-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e7005a50-4e51-4074-8a02-d76857064103\") " pod="openstack/glance-default-external-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.560330 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7005a50-4e51-4074-8a02-d76857064103-config-data\") pod \"glance-default-external-api-0\" (UID: \"e7005a50-4e51-4074-8a02-d76857064103\") " pod="openstack/glance-default-external-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.560687 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e7005a50-4e51-4074-8a02-d76857064103-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e7005a50-4e51-4074-8a02-d76857064103\") " pod="openstack/glance-default-external-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.560715 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7005a50-4e51-4074-8a02-d76857064103-logs\") pod \"glance-default-external-api-0\" (UID: \"e7005a50-4e51-4074-8a02-d76857064103\") " pod="openstack/glance-default-external-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.561053 4708 operation_generator.go:580] 
"MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"e7005a50-4e51-4074-8a02-d76857064103\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-external-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.566825 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7005a50-4e51-4074-8a02-d76857064103-config-data\") pod \"glance-default-external-api-0\" (UID: \"e7005a50-4e51-4074-8a02-d76857064103\") " pod="openstack/glance-default-external-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.566897 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7005a50-4e51-4074-8a02-d76857064103-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e7005a50-4e51-4074-8a02-d76857064103\") " pod="openstack/glance-default-external-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.572236 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7005a50-4e51-4074-8a02-d76857064103-scripts\") pod \"glance-default-external-api-0\" (UID: \"e7005a50-4e51-4074-8a02-d76857064103\") " pod="openstack/glance-default-external-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.576822 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8sblf\" (UniqueName: \"kubernetes.io/projected/e7005a50-4e51-4074-8a02-d76857064103-kube-api-access-8sblf\") pod \"glance-default-external-api-0\" (UID: \"e7005a50-4e51-4074-8a02-d76857064103\") " pod="openstack/glance-default-external-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.599658 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.601179 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.603723 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"e7005a50-4e51-4074-8a02-d76857064103\") " pod="openstack/glance-default-external-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.604114 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.610920 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.665556 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.665893 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3753e142-5f5e-45d7-ad6d-f718ae4abed6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.666039 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhrqm\" (UniqueName: \"kubernetes.io/projected/3753e142-5f5e-45d7-ad6d-f718ae4abed6-kube-api-access-rhrqm\") pod \"glance-default-internal-api-0\" (UID: \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.666163 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3753e142-5f5e-45d7-ad6d-f718ae4abed6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.666255 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3753e142-5f5e-45d7-ad6d-f718ae4abed6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.666654 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3753e142-5f5e-45d7-ad6d-f718ae4abed6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.666854 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3753e142-5f5e-45d7-ad6d-f718ae4abed6-logs\") pod \"glance-default-internal-api-0\" (UID: \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\") " 
pod="openstack/glance-default-internal-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.730270 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.768848 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3753e142-5f5e-45d7-ad6d-f718ae4abed6-logs\") pod \"glance-default-internal-api-0\" (UID: \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.768917 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.768978 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3753e142-5f5e-45d7-ad6d-f718ae4abed6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.769010 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhrqm\" (UniqueName: \"kubernetes.io/projected/3753e142-5f5e-45d7-ad6d-f718ae4abed6-kube-api-access-rhrqm\") pod \"glance-default-internal-api-0\" (UID: \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.769103 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3753e142-5f5e-45d7-ad6d-f718ae4abed6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.769149 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3753e142-5f5e-45d7-ad6d-f718ae4abed6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.769186 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3753e142-5f5e-45d7-ad6d-f718ae4abed6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.769890 4708 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/glance-default-internal-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.770095 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3753e142-5f5e-45d7-ad6d-f718ae4abed6-logs\") pod 
\"glance-default-internal-api-0\" (UID: \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.769744 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3753e142-5f5e-45d7-ad6d-f718ae4abed6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.773652 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3753e142-5f5e-45d7-ad6d-f718ae4abed6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.776937 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3753e142-5f5e-45d7-ad6d-f718ae4abed6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.779260 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3753e142-5f5e-45d7-ad6d-f718ae4abed6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.786083 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhrqm\" (UniqueName: \"kubernetes.io/projected/3753e142-5f5e-45d7-ad6d-f718ae4abed6-kube-api-access-rhrqm\") pod \"glance-default-internal-api-0\" (UID: \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.799073 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:27:55 crc kubenswrapper[4708]: I0203 07:27:55.977864 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 03 07:27:56 crc kubenswrapper[4708]: I0203 07:27:56.754559 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-85cjv" podUID="412c823d-20ba-42cb-9c05-70bee2ee89a2" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.114:5353: connect: connection refused" Feb 03 07:27:57 crc kubenswrapper[4708]: I0203 07:27:57.165704 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 07:27:57 crc kubenswrapper[4708]: I0203 07:27:57.239921 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 07:28:01 crc kubenswrapper[4708]: I0203 07:28:01.755405 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-85cjv" podUID="412c823d-20ba-42cb-9c05-70bee2ee89a2" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.114:5353: connect: connection refused" Feb 03 07:28:03 crc kubenswrapper[4708]: E0203 07:28:03.968162 4708 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Feb 03 07:28:03 crc kubenswrapper[4708]: E0203 07:28:03.970215 4708 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xxlq6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMou
nt:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-w75bv_openstack(fde8edd5-50e0-4bb0-8701-54e0998444a1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 03 07:28:03 crc kubenswrapper[4708]: E0203 07:28:03.971943 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-w75bv" podUID="fde8edd5-50e0-4bb0-8701-54e0998444a1" Feb 03 07:28:04 crc kubenswrapper[4708]: E0203 07:28:04.189380 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-w75bv" podUID="fde8edd5-50e0-4bb0-8701-54e0998444a1" Feb 03 07:28:04 crc kubenswrapper[4708]: I0203 07:28:04.393529 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-85cjv" Feb 03 07:28:04 crc kubenswrapper[4708]: I0203 07:28:04.548519 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/412c823d-20ba-42cb-9c05-70bee2ee89a2-config\") pod \"412c823d-20ba-42cb-9c05-70bee2ee89a2\" (UID: \"412c823d-20ba-42cb-9c05-70bee2ee89a2\") " Feb 03 07:28:04 crc kubenswrapper[4708]: I0203 07:28:04.548923 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/412c823d-20ba-42cb-9c05-70bee2ee89a2-dns-svc\") pod \"412c823d-20ba-42cb-9c05-70bee2ee89a2\" (UID: \"412c823d-20ba-42cb-9c05-70bee2ee89a2\") " Feb 03 07:28:04 crc kubenswrapper[4708]: I0203 07:28:04.548998 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/412c823d-20ba-42cb-9c05-70bee2ee89a2-ovsdbserver-nb\") pod \"412c823d-20ba-42cb-9c05-70bee2ee89a2\" (UID: \"412c823d-20ba-42cb-9c05-70bee2ee89a2\") " Feb 03 07:28:04 crc kubenswrapper[4708]: I0203 07:28:04.549031 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hbjtm\" (UniqueName: \"kubernetes.io/projected/412c823d-20ba-42cb-9c05-70bee2ee89a2-kube-api-access-hbjtm\") pod \"412c823d-20ba-42cb-9c05-70bee2ee89a2\" (UID: \"412c823d-20ba-42cb-9c05-70bee2ee89a2\") " Feb 03 07:28:04 crc kubenswrapper[4708]: I0203 07:28:04.549071 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/412c823d-20ba-42cb-9c05-70bee2ee89a2-ovsdbserver-sb\") pod \"412c823d-20ba-42cb-9c05-70bee2ee89a2\" (UID: \"412c823d-20ba-42cb-9c05-70bee2ee89a2\") " Feb 03 07:28:04 crc kubenswrapper[4708]: I0203 07:28:04.638197 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/412c823d-20ba-42cb-9c05-70bee2ee89a2-kube-api-access-hbjtm" (OuterVolumeSpecName: "kube-api-access-hbjtm") pod "412c823d-20ba-42cb-9c05-70bee2ee89a2" (UID: "412c823d-20ba-42cb-9c05-70bee2ee89a2"). InnerVolumeSpecName "kube-api-access-hbjtm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:28:04 crc kubenswrapper[4708]: I0203 07:28:04.653412 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-hwghb"] Feb 03 07:28:04 crc kubenswrapper[4708]: I0203 07:28:04.657587 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hbjtm\" (UniqueName: \"kubernetes.io/projected/412c823d-20ba-42cb-9c05-70bee2ee89a2-kube-api-access-hbjtm\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:04 crc kubenswrapper[4708]: I0203 07:28:04.713545 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/412c823d-20ba-42cb-9c05-70bee2ee89a2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "412c823d-20ba-42cb-9c05-70bee2ee89a2" (UID: "412c823d-20ba-42cb-9c05-70bee2ee89a2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:28:04 crc kubenswrapper[4708]: I0203 07:28:04.730323 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/412c823d-20ba-42cb-9c05-70bee2ee89a2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "412c823d-20ba-42cb-9c05-70bee2ee89a2" (UID: "412c823d-20ba-42cb-9c05-70bee2ee89a2"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:28:04 crc kubenswrapper[4708]: I0203 07:28:04.744245 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/412c823d-20ba-42cb-9c05-70bee2ee89a2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "412c823d-20ba-42cb-9c05-70bee2ee89a2" (UID: "412c823d-20ba-42cb-9c05-70bee2ee89a2"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:28:04 crc kubenswrapper[4708]: I0203 07:28:04.759419 4708 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/412c823d-20ba-42cb-9c05-70bee2ee89a2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:04 crc kubenswrapper[4708]: I0203 07:28:04.759453 4708 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/412c823d-20ba-42cb-9c05-70bee2ee89a2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:04 crc kubenswrapper[4708]: I0203 07:28:04.759465 4708 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/412c823d-20ba-42cb-9c05-70bee2ee89a2-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:04 crc kubenswrapper[4708]: I0203 07:28:04.774023 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/412c823d-20ba-42cb-9c05-70bee2ee89a2-config" (OuterVolumeSpecName: "config") pod "412c823d-20ba-42cb-9c05-70bee2ee89a2" (UID: "412c823d-20ba-42cb-9c05-70bee2ee89a2"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:28:04 crc kubenswrapper[4708]: I0203 07:28:04.832920 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-blhdl"] Feb 03 07:28:04 crc kubenswrapper[4708]: I0203 07:28:04.838876 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 07:28:04 crc kubenswrapper[4708]: W0203 07:28:04.845602 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podec5c8613_c88b_4cc5_8ad4_440e65523618.slice/crio-785ab40b156d9f85aab9411f0c2c054ce53a59dc920e95658ed731c3ecddcf3d WatchSource:0}: Error finding container 785ab40b156d9f85aab9411f0c2c054ce53a59dc920e95658ed731c3ecddcf3d: Status 404 returned error can't find the container with id 785ab40b156d9f85aab9411f0c2c054ce53a59dc920e95658ed731c3ecddcf3d Feb 03 07:28:04 crc kubenswrapper[4708]: I0203 07:28:04.848862 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-db-sync-8w6c7"] Feb 03 07:28:04 crc kubenswrapper[4708]: I0203 07:28:04.865917 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/412c823d-20ba-42cb-9c05-70bee2ee89a2-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:05 crc kubenswrapper[4708]: I0203 07:28:05.051898 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 07:28:05 crc kubenswrapper[4708]: W0203 07:28:05.059452 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode7005a50_4e51_4074_8a02_d76857064103.slice/crio-ce7b4aeb3dbbb16073fe24f8f78d1af8d17fa87264cf7be5051b238464edf67a WatchSource:0}: Error finding container ce7b4aeb3dbbb16073fe24f8f78d1af8d17fa87264cf7be5051b238464edf67a: Status 404 returned error can't find the container with id ce7b4aeb3dbbb16073fe24f8f78d1af8d17fa87264cf7be5051b238464edf67a Feb 03 07:28:05 crc kubenswrapper[4708]: I0203 07:28:05.190301 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-cl2c7" event={"ID":"ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29","Type":"ContainerStarted","Data":"8210fbdb49636d40b4ad1429001b753c58f8dcb8f8763c5be48d3501f7d51f32"} Feb 03 07:28:05 crc kubenswrapper[4708]: I0203 07:28:05.193597 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-blhdl" event={"ID":"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d","Type":"ContainerStarted","Data":"c977862cb77d9a2b79560c2f9ab6884282a15281c1867e5614b6bc74e07a6cfb"} Feb 03 07:28:05 crc kubenswrapper[4708]: I0203 07:28:05.198289 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-2smzn" event={"ID":"1a3122ca-fa36-4033-a44f-36d12d0b3f4a","Type":"ContainerStarted","Data":"740a3fafcc5199dcb4e70c85e69211cf63edc3daa6ddc13939b116f97220af85"} Feb 03 07:28:05 crc kubenswrapper[4708]: I0203 07:28:05.199833 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e7005a50-4e51-4074-8a02-d76857064103","Type":"ContainerStarted","Data":"ce7b4aeb3dbbb16073fe24f8f78d1af8d17fa87264cf7be5051b238464edf67a"} Feb 03 07:28:05 crc kubenswrapper[4708]: I0203 07:28:05.208399 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" 
event={"ID":"67498414-5132-496e-9638-189f5941ace0","Type":"ContainerStarted","Data":"df2930ca149e0a66df19a750e27479be61f11887a85606435a8612426d90bb50"} Feb 03 07:28:05 crc kubenswrapper[4708]: I0203 07:28:05.211047 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-db-sync-8w6c7" event={"ID":"ec5c8613-c88b-4cc5-8ad4-440e65523618","Type":"ContainerStarted","Data":"785ab40b156d9f85aab9411f0c2c054ce53a59dc920e95658ed731c3ecddcf3d"} Feb 03 07:28:05 crc kubenswrapper[4708]: I0203 07:28:05.214319 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"46305c26-487f-4187-a8ae-48fd2319d25a","Type":"ContainerStarted","Data":"cc9c1d8b39a8e19ddeda841a7167cce7198b34f64efd8c521320ce2762ac0382"} Feb 03 07:28:05 crc kubenswrapper[4708]: I0203 07:28:05.214380 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-cl2c7" podStartSLOduration=3.079313195 podStartE2EDuration="28.214360473s" podCreationTimestamp="2026-02-03 07:27:37 +0000 UTC" firstStartedPulling="2026-02-03 07:27:38.784162201 +0000 UTC m=+1037.766109008" lastFinishedPulling="2026-02-03 07:28:03.919209479 +0000 UTC m=+1062.901156286" observedRunningTime="2026-02-03 07:28:05.206937651 +0000 UTC m=+1064.188884458" watchObservedRunningTime="2026-02-03 07:28:05.214360473 +0000 UTC m=+1064.196307280" Feb 03 07:28:05 crc kubenswrapper[4708]: I0203 07:28:05.216713 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3753e142-5f5e-45d7-ad6d-f718ae4abed6","Type":"ContainerStarted","Data":"905ffb6e791b3a5b2570efbb619dd5891f40c69cf5354892769264d2a5de6a79"} Feb 03 07:28:05 crc kubenswrapper[4708]: I0203 07:28:05.225136 4708 generic.go:334] "Generic (PLEG): container finished" podID="2739cdfe-549c-495a-ac6c-7f6cb96de9a4" containerID="7b647a3e19efa8da5c3bb396f17dae62c898c85c11d027d948fd32a5c43995e9" exitCode=0 Feb 03 07:28:05 crc kubenswrapper[4708]: I0203 07:28:05.225212 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb" event={"ID":"2739cdfe-549c-495a-ac6c-7f6cb96de9a4","Type":"ContainerDied","Data":"7b647a3e19efa8da5c3bb396f17dae62c898c85c11d027d948fd32a5c43995e9"} Feb 03 07:28:05 crc kubenswrapper[4708]: I0203 07:28:05.225234 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb" event={"ID":"2739cdfe-549c-495a-ac6c-7f6cb96de9a4","Type":"ContainerStarted","Data":"2aacd1901cd490917c4da322b580a2cb5095ab8bab297131961634962103ea30"} Feb 03 07:28:05 crc kubenswrapper[4708]: I0203 07:28:05.230482 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-2smzn" podStartSLOduration=3.006962306 podStartE2EDuration="28.230465028s" podCreationTimestamp="2026-02-03 07:27:37 +0000 UTC" firstStartedPulling="2026-02-03 07:27:38.768510869 +0000 UTC m=+1037.750457676" lastFinishedPulling="2026-02-03 07:28:03.992013591 +0000 UTC m=+1062.973960398" observedRunningTime="2026-02-03 07:28:05.220178855 +0000 UTC m=+1064.202125682" watchObservedRunningTime="2026-02-03 07:28:05.230465028 +0000 UTC m=+1064.212411835" Feb 03 07:28:05 crc kubenswrapper[4708]: I0203 07:28:05.233100 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-85cjv" event={"ID":"412c823d-20ba-42cb-9c05-70bee2ee89a2","Type":"ContainerDied","Data":"3ed74be5574684387540bf6c9e2a8f152e859d2bef3cbcfcc6f4e6a4bde1a517"} Feb 03 07:28:05 crc kubenswrapper[4708]: 
I0203 07:28:05.233148 4708 scope.go:117] "RemoveContainer" containerID="59d478c2fd90f7840a37dec3167cce1573203aa6de1841bd844c5d928490f0c2" Feb 03 07:28:05 crc kubenswrapper[4708]: I0203 07:28:05.233233 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-85cjv" Feb 03 07:28:05 crc kubenswrapper[4708]: I0203 07:28:05.389849 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-85cjv"] Feb 03 07:28:05 crc kubenswrapper[4708]: I0203 07:28:05.398594 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-85cjv"] Feb 03 07:28:05 crc kubenswrapper[4708]: I0203 07:28:05.415465 4708 scope.go:117] "RemoveContainer" containerID="0e2b31aff1ea9a650da4a40bf6a2a234e0252278e569ade611e9b231f596a8cd" Feb 03 07:28:06 crc kubenswrapper[4708]: I0203 07:28:06.111252 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="412c823d-20ba-42cb-9c05-70bee2ee89a2" path="/var/lib/kubelet/pods/412c823d-20ba-42cb-9c05-70bee2ee89a2/volumes" Feb 03 07:28:06 crc kubenswrapper[4708]: I0203 07:28:06.242617 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb" event={"ID":"2739cdfe-549c-495a-ac6c-7f6cb96de9a4","Type":"ContainerStarted","Data":"c63524d993b523334425c38ab56f0748609e5877d97af5e7d7f6749f20d506ce"} Feb 03 07:28:06 crc kubenswrapper[4708]: I0203 07:28:06.242702 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb" Feb 03 07:28:06 crc kubenswrapper[4708]: I0203 07:28:06.252353 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-blhdl" event={"ID":"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d","Type":"ContainerStarted","Data":"26d9c4a841e463329378ef7d36ba7eec3ebd9de2972c52e2177fb8f07b5b3163"} Feb 03 07:28:06 crc kubenswrapper[4708]: I0203 07:28:06.255300 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3753e142-5f5e-45d7-ad6d-f718ae4abed6","Type":"ContainerStarted","Data":"8b4171a89a9867d5c8093f0d366d66dfd2476b60dcfff03e52cdca88b6395282"} Feb 03 07:28:06 crc kubenswrapper[4708]: I0203 07:28:06.257860 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e7005a50-4e51-4074-8a02-d76857064103","Type":"ContainerStarted","Data":"e013166e4c5fd45777c1726e65ce3e99a7e85ab78d199af7fc8b6a9842c67fe9"} Feb 03 07:28:06 crc kubenswrapper[4708]: I0203 07:28:06.280599 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb" podStartSLOduration=12.280577403 podStartE2EDuration="12.280577403s" podCreationTimestamp="2026-02-03 07:27:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:28:06.270288841 +0000 UTC m=+1065.252235668" watchObservedRunningTime="2026-02-03 07:28:06.280577403 +0000 UTC m=+1065.262524210" Feb 03 07:28:06 crc kubenswrapper[4708]: I0203 07:28:06.291466 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-blhdl" podStartSLOduration=17.291447709 podStartE2EDuration="17.291447709s" podCreationTimestamp="2026-02-03 07:27:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:28:06.287625376 +0000 
UTC m=+1065.269572183" watchObservedRunningTime="2026-02-03 07:28:06.291447709 +0000 UTC m=+1065.273394516" Feb 03 07:28:07 crc kubenswrapper[4708]: I0203 07:28:07.266187 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"46305c26-487f-4187-a8ae-48fd2319d25a","Type":"ContainerStarted","Data":"7ba1c6e5b06e4d3086cb7edbcc917f45894325a6286b31509c170c2ab9bb888a"} Feb 03 07:28:07 crc kubenswrapper[4708]: I0203 07:28:07.268643 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3753e142-5f5e-45d7-ad6d-f718ae4abed6","Type":"ContainerStarted","Data":"c5c4b388ea2282fb4bcc01aaf66bfc353c25d65b1cf7722a4e42187ff9837a57"} Feb 03 07:28:07 crc kubenswrapper[4708]: I0203 07:28:07.268740 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="3753e142-5f5e-45d7-ad6d-f718ae4abed6" containerName="glance-log" containerID="cri-o://8b4171a89a9867d5c8093f0d366d66dfd2476b60dcfff03e52cdca88b6395282" gracePeriod=30 Feb 03 07:28:07 crc kubenswrapper[4708]: I0203 07:28:07.268859 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="3753e142-5f5e-45d7-ad6d-f718ae4abed6" containerName="glance-httpd" containerID="cri-o://c5c4b388ea2282fb4bcc01aaf66bfc353c25d65b1cf7722a4e42187ff9837a57" gracePeriod=30 Feb 03 07:28:07 crc kubenswrapper[4708]: I0203 07:28:07.273608 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e7005a50-4e51-4074-8a02-d76857064103","Type":"ContainerStarted","Data":"b1cbf89bd881198e0cd36c7a0a2a3c47e1e6d312d384e560383a696da9b89114"} Feb 03 07:28:07 crc kubenswrapper[4708]: I0203 07:28:07.273742 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="e7005a50-4e51-4074-8a02-d76857064103" containerName="glance-log" containerID="cri-o://e013166e4c5fd45777c1726e65ce3e99a7e85ab78d199af7fc8b6a9842c67fe9" gracePeriod=30 Feb 03 07:28:07 crc kubenswrapper[4708]: I0203 07:28:07.273878 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="e7005a50-4e51-4074-8a02-d76857064103" containerName="glance-httpd" containerID="cri-o://b1cbf89bd881198e0cd36c7a0a2a3c47e1e6d312d384e560383a696da9b89114" gracePeriod=30 Feb 03 07:28:07 crc kubenswrapper[4708]: I0203 07:28:07.309435 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=13.309416278 podStartE2EDuration="13.309416278s" podCreationTimestamp="2026-02-03 07:27:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:28:07.288748662 +0000 UTC m=+1066.270695469" watchObservedRunningTime="2026-02-03 07:28:07.309416278 +0000 UTC m=+1066.291363085" Feb 03 07:28:07 crc kubenswrapper[4708]: I0203 07:28:07.322241 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=13.322222901 podStartE2EDuration="13.322222901s" podCreationTimestamp="2026-02-03 07:27:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:28:07.320360825 +0000 UTC m=+1066.302307632" 
watchObservedRunningTime="2026-02-03 07:28:07.322222901 +0000 UTC m=+1066.304169708" Feb 03 07:28:08 crc kubenswrapper[4708]: I0203 07:28:08.293375 4708 generic.go:334] "Generic (PLEG): container finished" podID="3753e142-5f5e-45d7-ad6d-f718ae4abed6" containerID="c5c4b388ea2282fb4bcc01aaf66bfc353c25d65b1cf7722a4e42187ff9837a57" exitCode=0 Feb 03 07:28:08 crc kubenswrapper[4708]: I0203 07:28:08.293936 4708 generic.go:334] "Generic (PLEG): container finished" podID="3753e142-5f5e-45d7-ad6d-f718ae4abed6" containerID="8b4171a89a9867d5c8093f0d366d66dfd2476b60dcfff03e52cdca88b6395282" exitCode=143 Feb 03 07:28:08 crc kubenswrapper[4708]: I0203 07:28:08.293741 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3753e142-5f5e-45d7-ad6d-f718ae4abed6","Type":"ContainerDied","Data":"c5c4b388ea2282fb4bcc01aaf66bfc353c25d65b1cf7722a4e42187ff9837a57"} Feb 03 07:28:08 crc kubenswrapper[4708]: I0203 07:28:08.294062 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3753e142-5f5e-45d7-ad6d-f718ae4abed6","Type":"ContainerDied","Data":"8b4171a89a9867d5c8093f0d366d66dfd2476b60dcfff03e52cdca88b6395282"} Feb 03 07:28:08 crc kubenswrapper[4708]: I0203 07:28:08.299163 4708 generic.go:334] "Generic (PLEG): container finished" podID="e7005a50-4e51-4074-8a02-d76857064103" containerID="b1cbf89bd881198e0cd36c7a0a2a3c47e1e6d312d384e560383a696da9b89114" exitCode=0 Feb 03 07:28:08 crc kubenswrapper[4708]: I0203 07:28:08.299204 4708 generic.go:334] "Generic (PLEG): container finished" podID="e7005a50-4e51-4074-8a02-d76857064103" containerID="e013166e4c5fd45777c1726e65ce3e99a7e85ab78d199af7fc8b6a9842c67fe9" exitCode=143 Feb 03 07:28:08 crc kubenswrapper[4708]: I0203 07:28:08.299233 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e7005a50-4e51-4074-8a02-d76857064103","Type":"ContainerDied","Data":"b1cbf89bd881198e0cd36c7a0a2a3c47e1e6d312d384e560383a696da9b89114"} Feb 03 07:28:08 crc kubenswrapper[4708]: I0203 07:28:08.299267 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e7005a50-4e51-4074-8a02-d76857064103","Type":"ContainerDied","Data":"e013166e4c5fd45777c1726e65ce3e99a7e85ab78d199af7fc8b6a9842c67fe9"} Feb 03 07:28:09 crc kubenswrapper[4708]: I0203 07:28:09.311058 4708 generic.go:334] "Generic (PLEG): container finished" podID="f6b4400d-8fe0-4b9a-985b-5e7854dcd78d" containerID="26d9c4a841e463329378ef7d36ba7eec3ebd9de2972c52e2177fb8f07b5b3163" exitCode=0 Feb 03 07:28:09 crc kubenswrapper[4708]: I0203 07:28:09.311100 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-blhdl" event={"ID":"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d","Type":"ContainerDied","Data":"26d9c4a841e463329378ef7d36ba7eec3ebd9de2972c52e2177fb8f07b5b3163"} Feb 03 07:28:11 crc kubenswrapper[4708]: I0203 07:28:11.334482 4708 generic.go:334] "Generic (PLEG): container finished" podID="ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29" containerID="8210fbdb49636d40b4ad1429001b753c58f8dcb8f8763c5be48d3501f7d51f32" exitCode=0 Feb 03 07:28:11 crc kubenswrapper[4708]: I0203 07:28:11.334575 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-cl2c7" event={"ID":"ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29","Type":"ContainerDied","Data":"8210fbdb49636d40b4ad1429001b753c58f8dcb8f8763c5be48d3501f7d51f32"} Feb 03 07:28:11 crc kubenswrapper[4708]: I0203 07:28:11.780275 
4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-blhdl" Feb 03 07:28:11 crc kubenswrapper[4708]: I0203 07:28:11.912465 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-fernet-keys\") pod \"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d\" (UID: \"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d\") " Feb 03 07:28:11 crc kubenswrapper[4708]: I0203 07:28:11.912548 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-scripts\") pod \"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d\" (UID: \"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d\") " Feb 03 07:28:11 crc kubenswrapper[4708]: I0203 07:28:11.912576 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-combined-ca-bundle\") pod \"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d\" (UID: \"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d\") " Feb 03 07:28:11 crc kubenswrapper[4708]: I0203 07:28:11.912617 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-credential-keys\") pod \"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d\" (UID: \"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d\") " Feb 03 07:28:11 crc kubenswrapper[4708]: I0203 07:28:11.913977 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wm84h\" (UniqueName: \"kubernetes.io/projected/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-kube-api-access-wm84h\") pod \"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d\" (UID: \"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d\") " Feb 03 07:28:11 crc kubenswrapper[4708]: I0203 07:28:11.914063 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-config-data\") pod \"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d\" (UID: \"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d\") " Feb 03 07:28:11 crc kubenswrapper[4708]: I0203 07:28:11.917383 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "f6b4400d-8fe0-4b9a-985b-5e7854dcd78d" (UID: "f6b4400d-8fe0-4b9a-985b-5e7854dcd78d"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:11 crc kubenswrapper[4708]: I0203 07:28:11.919295 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "f6b4400d-8fe0-4b9a-985b-5e7854dcd78d" (UID: "f6b4400d-8fe0-4b9a-985b-5e7854dcd78d"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:11 crc kubenswrapper[4708]: I0203 07:28:11.919868 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-scripts" (OuterVolumeSpecName: "scripts") pod "f6b4400d-8fe0-4b9a-985b-5e7854dcd78d" (UID: "f6b4400d-8fe0-4b9a-985b-5e7854dcd78d"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:11 crc kubenswrapper[4708]: I0203 07:28:11.921624 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-kube-api-access-wm84h" (OuterVolumeSpecName: "kube-api-access-wm84h") pod "f6b4400d-8fe0-4b9a-985b-5e7854dcd78d" (UID: "f6b4400d-8fe0-4b9a-985b-5e7854dcd78d"). InnerVolumeSpecName "kube-api-access-wm84h". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:28:11 crc kubenswrapper[4708]: I0203 07:28:11.941547 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-config-data" (OuterVolumeSpecName: "config-data") pod "f6b4400d-8fe0-4b9a-985b-5e7854dcd78d" (UID: "f6b4400d-8fe0-4b9a-985b-5e7854dcd78d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:11 crc kubenswrapper[4708]: I0203 07:28:11.943816 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f6b4400d-8fe0-4b9a-985b-5e7854dcd78d" (UID: "f6b4400d-8fe0-4b9a-985b-5e7854dcd78d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.016952 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wm84h\" (UniqueName: \"kubernetes.io/projected/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-kube-api-access-wm84h\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.017277 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.017287 4708 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-fernet-keys\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.017298 4708 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.017308 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.017317 4708 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d-credential-keys\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.367258 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-db-sync-8w6c7" event={"ID":"ec5c8613-c88b-4cc5-8ad4-440e65523618","Type":"ContainerStarted","Data":"f9e2ee463415df88279bd080e36fcbc8b9c5820dc9f0b86c82990283f297c768"} Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.372143 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-blhdl" Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.372664 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-blhdl" event={"ID":"f6b4400d-8fe0-4b9a-985b-5e7854dcd78d","Type":"ContainerDied","Data":"c977862cb77d9a2b79560c2f9ab6884282a15281c1867e5614b6bc74e07a6cfb"} Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.372687 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c977862cb77d9a2b79560c2f9ab6884282a15281c1867e5614b6bc74e07a6cfb" Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.901988 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-cl2c7" Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.941924 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.967679 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-7ddbc898b8-cqt5j"] Feb 03 07:28:12 crc kubenswrapper[4708]: E0203 07:28:12.968245 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6b4400d-8fe0-4b9a-985b-5e7854dcd78d" containerName="keystone-bootstrap" Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.968259 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6b4400d-8fe0-4b9a-985b-5e7854dcd78d" containerName="keystone-bootstrap" Feb 03 07:28:12 crc kubenswrapper[4708]: E0203 07:28:12.968271 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="412c823d-20ba-42cb-9c05-70bee2ee89a2" containerName="init" Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.968276 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="412c823d-20ba-42cb-9c05-70bee2ee89a2" containerName="init" Feb 03 07:28:12 crc kubenswrapper[4708]: E0203 07:28:12.968286 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3753e142-5f5e-45d7-ad6d-f718ae4abed6" containerName="glance-httpd" Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.968292 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="3753e142-5f5e-45d7-ad6d-f718ae4abed6" containerName="glance-httpd" Feb 03 07:28:12 crc kubenswrapper[4708]: E0203 07:28:12.968310 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="412c823d-20ba-42cb-9c05-70bee2ee89a2" containerName="dnsmasq-dns" Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.968315 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="412c823d-20ba-42cb-9c05-70bee2ee89a2" containerName="dnsmasq-dns" Feb 03 07:28:12 crc kubenswrapper[4708]: E0203 07:28:12.968362 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29" containerName="placement-db-sync" Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.968371 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29" containerName="placement-db-sync" Feb 03 07:28:12 crc kubenswrapper[4708]: E0203 07:28:12.968382 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3753e142-5f5e-45d7-ad6d-f718ae4abed6" containerName="glance-log" Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.968387 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="3753e142-5f5e-45d7-ad6d-f718ae4abed6" containerName="glance-log" Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.969264 4708 
memory_manager.go:354] "RemoveStaleState removing state" podUID="ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29" containerName="placement-db-sync" Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.969280 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="412c823d-20ba-42cb-9c05-70bee2ee89a2" containerName="dnsmasq-dns" Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.969291 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="3753e142-5f5e-45d7-ad6d-f718ae4abed6" containerName="glance-log" Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.969303 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="3753e142-5f5e-45d7-ad6d-f718ae4abed6" containerName="glance-httpd" Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.969314 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6b4400d-8fe0-4b9a-985b-5e7854dcd78d" containerName="keystone-bootstrap" Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.969958 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7ddbc898b8-cqt5j" Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.973083 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.973324 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.973489 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.973772 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-x5pvc" Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.973943 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.974082 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 03 07:28:12 crc kubenswrapper[4708]: I0203 07:28:12.981183 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7ddbc898b8-cqt5j"] Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.043272 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xtqf5\" (UniqueName: \"kubernetes.io/projected/ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29-kube-api-access-xtqf5\") pod \"ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29\" (UID: \"ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29\") " Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.043340 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3753e142-5f5e-45d7-ad6d-f718ae4abed6-config-data\") pod \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\" (UID: \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\") " Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.043365 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3753e142-5f5e-45d7-ad6d-f718ae4abed6-logs\") pod \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\" (UID: \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\") " Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.043404 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29-logs\") pod \"ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29\" (UID: \"ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29\") " Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.043445 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29-combined-ca-bundle\") pod \"ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29\" (UID: \"ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29\") " Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.043511 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29-scripts\") pod \"ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29\" (UID: \"ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29\") " Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.043566 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29-config-data\") pod \"ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29\" (UID: \"ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29\") " Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.043589 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3753e142-5f5e-45d7-ad6d-f718ae4abed6-httpd-run\") pod \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\" (UID: \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\") " Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.043609 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\" (UID: \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\") " Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.043660 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3753e142-5f5e-45d7-ad6d-f718ae4abed6-combined-ca-bundle\") pod \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\" (UID: \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\") " Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.043675 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rhrqm\" (UniqueName: \"kubernetes.io/projected/3753e142-5f5e-45d7-ad6d-f718ae4abed6-kube-api-access-rhrqm\") pod \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\" (UID: \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\") " Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.043691 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3753e142-5f5e-45d7-ad6d-f718ae4abed6-scripts\") pod \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\" (UID: \"3753e142-5f5e-45d7-ad6d-f718ae4abed6\") " Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.043745 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29-logs" (OuterVolumeSpecName: "logs") pod "ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29" (UID: "ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.043913 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/11f8a75b-7b47-4838-9751-5a03516154e7-internal-tls-certs\") pod \"keystone-7ddbc898b8-cqt5j\" (UID: \"11f8a75b-7b47-4838-9751-5a03516154e7\") " pod="openstack/keystone-7ddbc898b8-cqt5j" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.043937 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11f8a75b-7b47-4838-9751-5a03516154e7-scripts\") pod \"keystone-7ddbc898b8-cqt5j\" (UID: \"11f8a75b-7b47-4838-9751-5a03516154e7\") " pod="openstack/keystone-7ddbc898b8-cqt5j" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.043979 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/11f8a75b-7b47-4838-9751-5a03516154e7-credential-keys\") pod \"keystone-7ddbc898b8-cqt5j\" (UID: \"11f8a75b-7b47-4838-9751-5a03516154e7\") " pod="openstack/keystone-7ddbc898b8-cqt5j" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.043993 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/11f8a75b-7b47-4838-9751-5a03516154e7-public-tls-certs\") pod \"keystone-7ddbc898b8-cqt5j\" (UID: \"11f8a75b-7b47-4838-9751-5a03516154e7\") " pod="openstack/keystone-7ddbc898b8-cqt5j" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.044010 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11f8a75b-7b47-4838-9751-5a03516154e7-config-data\") pod \"keystone-7ddbc898b8-cqt5j\" (UID: \"11f8a75b-7b47-4838-9751-5a03516154e7\") " pod="openstack/keystone-7ddbc898b8-cqt5j" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.044024 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvnpv\" (UniqueName: \"kubernetes.io/projected/11f8a75b-7b47-4838-9751-5a03516154e7-kube-api-access-mvnpv\") pod \"keystone-7ddbc898b8-cqt5j\" (UID: \"11f8a75b-7b47-4838-9751-5a03516154e7\") " pod="openstack/keystone-7ddbc898b8-cqt5j" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.044074 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/11f8a75b-7b47-4838-9751-5a03516154e7-fernet-keys\") pod \"keystone-7ddbc898b8-cqt5j\" (UID: \"11f8a75b-7b47-4838-9751-5a03516154e7\") " pod="openstack/keystone-7ddbc898b8-cqt5j" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.044090 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11f8a75b-7b47-4838-9751-5a03516154e7-combined-ca-bundle\") pod \"keystone-7ddbc898b8-cqt5j\" (UID: \"11f8a75b-7b47-4838-9751-5a03516154e7\") " pod="openstack/keystone-7ddbc898b8-cqt5j" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.044151 4708 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:13 crc kubenswrapper[4708]: 
I0203 07:28:13.044939 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3753e142-5f5e-45d7-ad6d-f718ae4abed6-logs" (OuterVolumeSpecName: "logs") pod "3753e142-5f5e-45d7-ad6d-f718ae4abed6" (UID: "3753e142-5f5e-45d7-ad6d-f718ae4abed6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.045116 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3753e142-5f5e-45d7-ad6d-f718ae4abed6-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "3753e142-5f5e-45d7-ad6d-f718ae4abed6" (UID: "3753e142-5f5e-45d7-ad6d-f718ae4abed6"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.049484 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3753e142-5f5e-45d7-ad6d-f718ae4abed6-kube-api-access-rhrqm" (OuterVolumeSpecName: "kube-api-access-rhrqm") pod "3753e142-5f5e-45d7-ad6d-f718ae4abed6" (UID: "3753e142-5f5e-45d7-ad6d-f718ae4abed6"). InnerVolumeSpecName "kube-api-access-rhrqm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.060382 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "glance") pod "3753e142-5f5e-45d7-ad6d-f718ae4abed6" (UID: "3753e142-5f5e-45d7-ad6d-f718ae4abed6"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.060680 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3753e142-5f5e-45d7-ad6d-f718ae4abed6-scripts" (OuterVolumeSpecName: "scripts") pod "3753e142-5f5e-45d7-ad6d-f718ae4abed6" (UID: "3753e142-5f5e-45d7-ad6d-f718ae4abed6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.060703 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29-scripts" (OuterVolumeSpecName: "scripts") pod "ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29" (UID: "ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.060753 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29-kube-api-access-xtqf5" (OuterVolumeSpecName: "kube-api-access-xtqf5") pod "ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29" (UID: "ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29"). InnerVolumeSpecName "kube-api-access-xtqf5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.070196 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.106781 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29" (UID: "ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.107905 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3753e142-5f5e-45d7-ad6d-f718ae4abed6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3753e142-5f5e-45d7-ad6d-f718ae4abed6" (UID: "3753e142-5f5e-45d7-ad6d-f718ae4abed6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.109535 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29-config-data" (OuterVolumeSpecName: "config-data") pod "ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29" (UID: "ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.140890 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3753e142-5f5e-45d7-ad6d-f718ae4abed6-config-data" (OuterVolumeSpecName: "config-data") pod "3753e142-5f5e-45d7-ad6d-f718ae4abed6" (UID: "3753e142-5f5e-45d7-ad6d-f718ae4abed6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.145429 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"e7005a50-4e51-4074-8a02-d76857064103\" (UID: \"e7005a50-4e51-4074-8a02-d76857064103\") " Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.145474 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7005a50-4e51-4074-8a02-d76857064103-logs\") pod \"e7005a50-4e51-4074-8a02-d76857064103\" (UID: \"e7005a50-4e51-4074-8a02-d76857064103\") " Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.145501 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7005a50-4e51-4074-8a02-d76857064103-scripts\") pod \"e7005a50-4e51-4074-8a02-d76857064103\" (UID: \"e7005a50-4e51-4074-8a02-d76857064103\") " Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.145582 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e7005a50-4e51-4074-8a02-d76857064103-httpd-run\") pod \"e7005a50-4e51-4074-8a02-d76857064103\" (UID: \"e7005a50-4e51-4074-8a02-d76857064103\") " Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.145661 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8sblf\" (UniqueName: \"kubernetes.io/projected/e7005a50-4e51-4074-8a02-d76857064103-kube-api-access-8sblf\") pod \"e7005a50-4e51-4074-8a02-d76857064103\" (UID: \"e7005a50-4e51-4074-8a02-d76857064103\") " Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.145678 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7005a50-4e51-4074-8a02-d76857064103-config-data\") pod \"e7005a50-4e51-4074-8a02-d76857064103\" (UID: \"e7005a50-4e51-4074-8a02-d76857064103\") " Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.145765 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7005a50-4e51-4074-8a02-d76857064103-combined-ca-bundle\") pod \"e7005a50-4e51-4074-8a02-d76857064103\" (UID: \"e7005a50-4e51-4074-8a02-d76857064103\") " Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.145939 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/11f8a75b-7b47-4838-9751-5a03516154e7-fernet-keys\") pod \"keystone-7ddbc898b8-cqt5j\" (UID: \"11f8a75b-7b47-4838-9751-5a03516154e7\") " pod="openstack/keystone-7ddbc898b8-cqt5j" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.145962 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11f8a75b-7b47-4838-9751-5a03516154e7-combined-ca-bundle\") pod \"keystone-7ddbc898b8-cqt5j\" (UID: \"11f8a75b-7b47-4838-9751-5a03516154e7\") " pod="openstack/keystone-7ddbc898b8-cqt5j" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.146036 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/11f8a75b-7b47-4838-9751-5a03516154e7-internal-tls-certs\") pod \"keystone-7ddbc898b8-cqt5j\" (UID: \"11f8a75b-7b47-4838-9751-5a03516154e7\") " pod="openstack/keystone-7ddbc898b8-cqt5j" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.146055 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11f8a75b-7b47-4838-9751-5a03516154e7-scripts\") pod \"keystone-7ddbc898b8-cqt5j\" (UID: \"11f8a75b-7b47-4838-9751-5a03516154e7\") " pod="openstack/keystone-7ddbc898b8-cqt5j" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.146094 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/11f8a75b-7b47-4838-9751-5a03516154e7-credential-keys\") pod \"keystone-7ddbc898b8-cqt5j\" (UID: \"11f8a75b-7b47-4838-9751-5a03516154e7\") " pod="openstack/keystone-7ddbc898b8-cqt5j" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.146109 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/11f8a75b-7b47-4838-9751-5a03516154e7-public-tls-certs\") pod \"keystone-7ddbc898b8-cqt5j\" (UID: \"11f8a75b-7b47-4838-9751-5a03516154e7\") " pod="openstack/keystone-7ddbc898b8-cqt5j" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.146124 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11f8a75b-7b47-4838-9751-5a03516154e7-config-data\") pod \"keystone-7ddbc898b8-cqt5j\" (UID: \"11f8a75b-7b47-4838-9751-5a03516154e7\") " pod="openstack/keystone-7ddbc898b8-cqt5j" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.146139 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvnpv\" (UniqueName: \"kubernetes.io/projected/11f8a75b-7b47-4838-9751-5a03516154e7-kube-api-access-mvnpv\") pod \"keystone-7ddbc898b8-cqt5j\" (UID: \"11f8a75b-7b47-4838-9751-5a03516154e7\") " pod="openstack/keystone-7ddbc898b8-cqt5j" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.146212 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:13 
crc kubenswrapper[4708]: I0203 07:28:13.146223 4708 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.146233 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.146242 4708 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3753e142-5f5e-45d7-ad6d-f718ae4abed6-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.146261 4708 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.146270 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3753e142-5f5e-45d7-ad6d-f718ae4abed6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.146279 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rhrqm\" (UniqueName: \"kubernetes.io/projected/3753e142-5f5e-45d7-ad6d-f718ae4abed6-kube-api-access-rhrqm\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.146289 4708 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3753e142-5f5e-45d7-ad6d-f718ae4abed6-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.146297 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xtqf5\" (UniqueName: \"kubernetes.io/projected/ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29-kube-api-access-xtqf5\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.146306 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3753e142-5f5e-45d7-ad6d-f718ae4abed6-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.146313 4708 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3753e142-5f5e-45d7-ad6d-f718ae4abed6-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.151546 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7005a50-4e51-4074-8a02-d76857064103-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "e7005a50-4e51-4074-8a02-d76857064103" (UID: "e7005a50-4e51-4074-8a02-d76857064103"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.151824 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7005a50-4e51-4074-8a02-d76857064103-logs" (OuterVolumeSpecName: "logs") pod "e7005a50-4e51-4074-8a02-d76857064103" (UID: "e7005a50-4e51-4074-8a02-d76857064103"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.156754 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "e7005a50-4e51-4074-8a02-d76857064103" (UID: "e7005a50-4e51-4074-8a02-d76857064103"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.156782 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11f8a75b-7b47-4838-9751-5a03516154e7-scripts\") pod \"keystone-7ddbc898b8-cqt5j\" (UID: \"11f8a75b-7b47-4838-9751-5a03516154e7\") " pod="openstack/keystone-7ddbc898b8-cqt5j" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.157167 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11f8a75b-7b47-4838-9751-5a03516154e7-combined-ca-bundle\") pod \"keystone-7ddbc898b8-cqt5j\" (UID: \"11f8a75b-7b47-4838-9751-5a03516154e7\") " pod="openstack/keystone-7ddbc898b8-cqt5j" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.157267 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11f8a75b-7b47-4838-9751-5a03516154e7-config-data\") pod \"keystone-7ddbc898b8-cqt5j\" (UID: \"11f8a75b-7b47-4838-9751-5a03516154e7\") " pod="openstack/keystone-7ddbc898b8-cqt5j" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.157648 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7005a50-4e51-4074-8a02-d76857064103-kube-api-access-8sblf" (OuterVolumeSpecName: "kube-api-access-8sblf") pod "e7005a50-4e51-4074-8a02-d76857064103" (UID: "e7005a50-4e51-4074-8a02-d76857064103"). InnerVolumeSpecName "kube-api-access-8sblf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.157982 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7005a50-4e51-4074-8a02-d76857064103-scripts" (OuterVolumeSpecName: "scripts") pod "e7005a50-4e51-4074-8a02-d76857064103" (UID: "e7005a50-4e51-4074-8a02-d76857064103"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.159222 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/11f8a75b-7b47-4838-9751-5a03516154e7-credential-keys\") pod \"keystone-7ddbc898b8-cqt5j\" (UID: \"11f8a75b-7b47-4838-9751-5a03516154e7\") " pod="openstack/keystone-7ddbc898b8-cqt5j" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.160650 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/11f8a75b-7b47-4838-9751-5a03516154e7-fernet-keys\") pod \"keystone-7ddbc898b8-cqt5j\" (UID: \"11f8a75b-7b47-4838-9751-5a03516154e7\") " pod="openstack/keystone-7ddbc898b8-cqt5j" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.171283 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/11f8a75b-7b47-4838-9751-5a03516154e7-internal-tls-certs\") pod \"keystone-7ddbc898b8-cqt5j\" (UID: \"11f8a75b-7b47-4838-9751-5a03516154e7\") " pod="openstack/keystone-7ddbc898b8-cqt5j" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.171752 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/11f8a75b-7b47-4838-9751-5a03516154e7-public-tls-certs\") pod \"keystone-7ddbc898b8-cqt5j\" (UID: \"11f8a75b-7b47-4838-9751-5a03516154e7\") " pod="openstack/keystone-7ddbc898b8-cqt5j" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.174944 4708 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.175305 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvnpv\" (UniqueName: \"kubernetes.io/projected/11f8a75b-7b47-4838-9751-5a03516154e7-kube-api-access-mvnpv\") pod \"keystone-7ddbc898b8-cqt5j\" (UID: \"11f8a75b-7b47-4838-9751-5a03516154e7\") " pod="openstack/keystone-7ddbc898b8-cqt5j" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.183487 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7005a50-4e51-4074-8a02-d76857064103-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e7005a50-4e51-4074-8a02-d76857064103" (UID: "e7005a50-4e51-4074-8a02-d76857064103"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.232236 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7005a50-4e51-4074-8a02-d76857064103-config-data" (OuterVolumeSpecName: "config-data") pod "e7005a50-4e51-4074-8a02-d76857064103" (UID: "e7005a50-4e51-4074-8a02-d76857064103"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.248186 4708 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.248213 4708 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e7005a50-4e51-4074-8a02-d76857064103-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.248224 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8sblf\" (UniqueName: \"kubernetes.io/projected/e7005a50-4e51-4074-8a02-d76857064103-kube-api-access-8sblf\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.248233 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7005a50-4e51-4074-8a02-d76857064103-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.248242 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7005a50-4e51-4074-8a02-d76857064103-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.248267 4708 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.248275 4708 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7005a50-4e51-4074-8a02-d76857064103-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.248285 4708 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7005a50-4e51-4074-8a02-d76857064103-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.262893 4708 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.350237 4708 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.381253 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7ddbc898b8-cqt5j" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.418132 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-cl2c7" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.418139 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-cl2c7" event={"ID":"ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29","Type":"ContainerDied","Data":"83f79bbb97d9dbc4caf3d455587c8dbe20dcacaacdac023418218e23e6763f85"} Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.419897 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="83f79bbb97d9dbc4caf3d455587c8dbe20dcacaacdac023418218e23e6763f85" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.427262 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3753e142-5f5e-45d7-ad6d-f718ae4abed6","Type":"ContainerDied","Data":"905ffb6e791b3a5b2570efbb619dd5891f40c69cf5354892769264d2a5de6a79"} Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.427319 4708 scope.go:117] "RemoveContainer" containerID="c5c4b388ea2282fb4bcc01aaf66bfc353c25d65b1cf7722a4e42187ff9837a57" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.427466 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.477250 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.477670 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e7005a50-4e51-4074-8a02-d76857064103","Type":"ContainerDied","Data":"ce7b4aeb3dbbb16073fe24f8f78d1af8d17fa87264cf7be5051b238464edf67a"} Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.494894 4708 generic.go:334] "Generic (PLEG): container finished" podID="ec5c8613-c88b-4cc5-8ad4-440e65523618" containerID="f9e2ee463415df88279bd080e36fcbc8b9c5820dc9f0b86c82990283f297c768" exitCode=0 Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.494945 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-db-sync-8w6c7" event={"ID":"ec5c8613-c88b-4cc5-8ad4-440e65523618","Type":"ContainerDied","Data":"f9e2ee463415df88279bd080e36fcbc8b9c5820dc9f0b86c82990283f297c768"} Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.646159 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-67599f68dd-cgvwn"] Feb 03 07:28:13 crc kubenswrapper[4708]: E0203 07:28:13.646597 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7005a50-4e51-4074-8a02-d76857064103" containerName="glance-log" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.646609 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7005a50-4e51-4074-8a02-d76857064103" containerName="glance-log" Feb 03 07:28:13 crc kubenswrapper[4708]: E0203 07:28:13.646622 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7005a50-4e51-4074-8a02-d76857064103" containerName="glance-httpd" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.646642 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7005a50-4e51-4074-8a02-d76857064103" containerName="glance-httpd" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.646846 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7005a50-4e51-4074-8a02-d76857064103" containerName="glance-log" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.646862 4708 
memory_manager.go:354] "RemoveStaleState removing state" podUID="e7005a50-4e51-4074-8a02-d76857064103" containerName="glance-httpd" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.647838 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-67599f68dd-cgvwn" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.660432 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.660619 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-lk7kq" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.660730 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.660850 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.661006 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.695851 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-67599f68dd-cgvwn"] Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.763057 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/11e8be85-5666-4e3d-8964-b0d554d5b1ef-internal-tls-certs\") pod \"placement-67599f68dd-cgvwn\" (UID: \"11e8be85-5666-4e3d-8964-b0d554d5b1ef\") " pod="openstack/placement-67599f68dd-cgvwn" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.763129 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11e8be85-5666-4e3d-8964-b0d554d5b1ef-config-data\") pod \"placement-67599f68dd-cgvwn\" (UID: \"11e8be85-5666-4e3d-8964-b0d554d5b1ef\") " pod="openstack/placement-67599f68dd-cgvwn" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.763177 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11e8be85-5666-4e3d-8964-b0d554d5b1ef-scripts\") pod \"placement-67599f68dd-cgvwn\" (UID: \"11e8be85-5666-4e3d-8964-b0d554d5b1ef\") " pod="openstack/placement-67599f68dd-cgvwn" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.763222 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2cdhb\" (UniqueName: \"kubernetes.io/projected/11e8be85-5666-4e3d-8964-b0d554d5b1ef-kube-api-access-2cdhb\") pod \"placement-67599f68dd-cgvwn\" (UID: \"11e8be85-5666-4e3d-8964-b0d554d5b1ef\") " pod="openstack/placement-67599f68dd-cgvwn" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.763293 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/11e8be85-5666-4e3d-8964-b0d554d5b1ef-public-tls-certs\") pod \"placement-67599f68dd-cgvwn\" (UID: \"11e8be85-5666-4e3d-8964-b0d554d5b1ef\") " pod="openstack/placement-67599f68dd-cgvwn" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.763342 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/11e8be85-5666-4e3d-8964-b0d554d5b1ef-logs\") pod \"placement-67599f68dd-cgvwn\" (UID: \"11e8be85-5666-4e3d-8964-b0d554d5b1ef\") " pod="openstack/placement-67599f68dd-cgvwn" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.763383 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11e8be85-5666-4e3d-8964-b0d554d5b1ef-combined-ca-bundle\") pod \"placement-67599f68dd-cgvwn\" (UID: \"11e8be85-5666-4e3d-8964-b0d554d5b1ef\") " pod="openstack/placement-67599f68dd-cgvwn" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.809030 4708 scope.go:117] "RemoveContainer" containerID="8b4171a89a9867d5c8093f0d366d66dfd2476b60dcfff03e52cdca88b6395282" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.848624 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.866844 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/11e8be85-5666-4e3d-8964-b0d554d5b1ef-public-tls-certs\") pod \"placement-67599f68dd-cgvwn\" (UID: \"11e8be85-5666-4e3d-8964-b0d554d5b1ef\") " pod="openstack/placement-67599f68dd-cgvwn" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.866927 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/11e8be85-5666-4e3d-8964-b0d554d5b1ef-logs\") pod \"placement-67599f68dd-cgvwn\" (UID: \"11e8be85-5666-4e3d-8964-b0d554d5b1ef\") " pod="openstack/placement-67599f68dd-cgvwn" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.866971 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11e8be85-5666-4e3d-8964-b0d554d5b1ef-combined-ca-bundle\") pod \"placement-67599f68dd-cgvwn\" (UID: \"11e8be85-5666-4e3d-8964-b0d554d5b1ef\") " pod="openstack/placement-67599f68dd-cgvwn" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.867013 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/11e8be85-5666-4e3d-8964-b0d554d5b1ef-internal-tls-certs\") pod \"placement-67599f68dd-cgvwn\" (UID: \"11e8be85-5666-4e3d-8964-b0d554d5b1ef\") " pod="openstack/placement-67599f68dd-cgvwn" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.867063 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11e8be85-5666-4e3d-8964-b0d554d5b1ef-config-data\") pod \"placement-67599f68dd-cgvwn\" (UID: \"11e8be85-5666-4e3d-8964-b0d554d5b1ef\") " pod="openstack/placement-67599f68dd-cgvwn" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.867109 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11e8be85-5666-4e3d-8964-b0d554d5b1ef-scripts\") pod \"placement-67599f68dd-cgvwn\" (UID: \"11e8be85-5666-4e3d-8964-b0d554d5b1ef\") " pod="openstack/placement-67599f68dd-cgvwn" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.867147 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2cdhb\" (UniqueName: \"kubernetes.io/projected/11e8be85-5666-4e3d-8964-b0d554d5b1ef-kube-api-access-2cdhb\") pod \"placement-67599f68dd-cgvwn\" (UID: 
\"11e8be85-5666-4e3d-8964-b0d554d5b1ef\") " pod="openstack/placement-67599f68dd-cgvwn" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.868446 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.869376 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/11e8be85-5666-4e3d-8964-b0d554d5b1ef-logs\") pod \"placement-67599f68dd-cgvwn\" (UID: \"11e8be85-5666-4e3d-8964-b0d554d5b1ef\") " pod="openstack/placement-67599f68dd-cgvwn" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.876834 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11e8be85-5666-4e3d-8964-b0d554d5b1ef-config-data\") pod \"placement-67599f68dd-cgvwn\" (UID: \"11e8be85-5666-4e3d-8964-b0d554d5b1ef\") " pod="openstack/placement-67599f68dd-cgvwn" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.876946 4708 scope.go:117] "RemoveContainer" containerID="b1cbf89bd881198e0cd36c7a0a2a3c47e1e6d312d384e560383a696da9b89114" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.881401 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/11e8be85-5666-4e3d-8964-b0d554d5b1ef-public-tls-certs\") pod \"placement-67599f68dd-cgvwn\" (UID: \"11e8be85-5666-4e3d-8964-b0d554d5b1ef\") " pod="openstack/placement-67599f68dd-cgvwn" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.881527 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/11e8be85-5666-4e3d-8964-b0d554d5b1ef-internal-tls-certs\") pod \"placement-67599f68dd-cgvwn\" (UID: \"11e8be85-5666-4e3d-8964-b0d554d5b1ef\") " pod="openstack/placement-67599f68dd-cgvwn" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.893914 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.895137 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11e8be85-5666-4e3d-8964-b0d554d5b1ef-scripts\") pod \"placement-67599f68dd-cgvwn\" (UID: \"11e8be85-5666-4e3d-8964-b0d554d5b1ef\") " pod="openstack/placement-67599f68dd-cgvwn" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.895643 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11e8be85-5666-4e3d-8964-b0d554d5b1ef-combined-ca-bundle\") pod \"placement-67599f68dd-cgvwn\" (UID: \"11e8be85-5666-4e3d-8964-b0d554d5b1ef\") " pod="openstack/placement-67599f68dd-cgvwn" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.904075 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2cdhb\" (UniqueName: \"kubernetes.io/projected/11e8be85-5666-4e3d-8964-b0d554d5b1ef-kube-api-access-2cdhb\") pod \"placement-67599f68dd-cgvwn\" (UID: \"11e8be85-5666-4e3d-8964-b0d554d5b1ef\") " pod="openstack/placement-67599f68dd-cgvwn" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.915548 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.929668 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 
07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.931328 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.936400 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-rwpwp" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.936909 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.937203 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.937572 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.950223 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.953564 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.956218 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.956453 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.964283 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 07:28:13 crc kubenswrapper[4708]: I0203 07:28:13.984497 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.035362 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7ddbc898b8-cqt5j"] Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.051304 4708 scope.go:117] "RemoveContainer" containerID="e013166e4c5fd45777c1726e65ce3e99a7e85ab78d199af7fc8b6a9842c67fe9" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.073616 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-scripts\") pod \"glance-default-internal-api-0\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.073717 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/23415315-630b-4b47-91ac-ac60c2af15bc-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") " pod="openstack/glance-default-external-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.073749 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23415315-630b-4b47-91ac-ac60c2af15bc-scripts\") pod \"glance-default-external-api-0\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") " pod="openstack/glance-default-external-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.073774 4708 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.073816 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23415315-630b-4b47-91ac-ac60c2af15bc-logs\") pod \"glance-default-external-api-0\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") " pod="openstack/glance-default-external-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.073935 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ccsld\" (UniqueName: \"kubernetes.io/projected/23415315-630b-4b47-91ac-ac60c2af15bc-kube-api-access-ccsld\") pod \"glance-default-external-api-0\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") " pod="openstack/glance-default-external-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.073992 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-logs\") pod \"glance-default-internal-api-0\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.074017 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jdch\" (UniqueName: \"kubernetes.io/projected/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-kube-api-access-9jdch\") pod \"glance-default-internal-api-0\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.074055 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-config-data\") pod \"glance-default-internal-api-0\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.074201 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") " pod="openstack/glance-default-external-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.074272 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23415315-630b-4b47-91ac-ac60c2af15bc-config-data\") pod \"glance-default-external-api-0\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") " pod="openstack/glance-default-external-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.074300 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:28:14 crc 
kubenswrapper[4708]: I0203 07:28:14.074328 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.074357 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/23415315-630b-4b47-91ac-ac60c2af15bc-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") " pod="openstack/glance-default-external-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.074464 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.074551 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23415315-630b-4b47-91ac-ac60c2af15bc-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") " pod="openstack/glance-default-external-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.119416 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3753e142-5f5e-45d7-ad6d-f718ae4abed6" path="/var/lib/kubelet/pods/3753e142-5f5e-45d7-ad6d-f718ae4abed6/volumes" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.120990 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7005a50-4e51-4074-8a02-d76857064103" path="/var/lib/kubelet/pods/e7005a50-4e51-4074-8a02-d76857064103/volumes" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.129448 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-67599f68dd-cgvwn" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.175996 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-scripts\") pod \"glance-default-internal-api-0\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.176082 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/23415315-630b-4b47-91ac-ac60c2af15bc-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") " pod="openstack/glance-default-external-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.176116 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23415315-630b-4b47-91ac-ac60c2af15bc-scripts\") pod \"glance-default-external-api-0\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") " pod="openstack/glance-default-external-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.176136 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.176157 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23415315-630b-4b47-91ac-ac60c2af15bc-logs\") pod \"glance-default-external-api-0\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") " pod="openstack/glance-default-external-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.176178 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ccsld\" (UniqueName: \"kubernetes.io/projected/23415315-630b-4b47-91ac-ac60c2af15bc-kube-api-access-ccsld\") pod \"glance-default-external-api-0\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") " pod="openstack/glance-default-external-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.176194 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-logs\") pod \"glance-default-internal-api-0\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.176209 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jdch\" (UniqueName: \"kubernetes.io/projected/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-kube-api-access-9jdch\") pod \"glance-default-internal-api-0\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.176224 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-config-data\") pod \"glance-default-internal-api-0\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 
07:28:14.176266 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") " pod="openstack/glance-default-external-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.176300 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23415315-630b-4b47-91ac-ac60c2af15bc-config-data\") pod \"glance-default-external-api-0\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") " pod="openstack/glance-default-external-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.176314 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.176330 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.176348 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/23415315-630b-4b47-91ac-ac60c2af15bc-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") " pod="openstack/glance-default-external-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.176398 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.176422 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23415315-630b-4b47-91ac-ac60c2af15bc-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") " pod="openstack/glance-default-external-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.181784 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-scripts\") pod \"glance-default-internal-api-0\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.182431 4708 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-external-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.182830 4708 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-config-data\") pod \"glance-default-internal-api-0\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.183231 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/23415315-630b-4b47-91ac-ac60c2af15bc-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") " pod="openstack/glance-default-external-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.185193 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.185752 4708 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/glance-default-internal-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.186064 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23415315-630b-4b47-91ac-ac60c2af15bc-logs\") pod \"glance-default-external-api-0\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") " pod="openstack/glance-default-external-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.186909 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-logs\") pod \"glance-default-internal-api-0\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.187172 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/23415315-630b-4b47-91ac-ac60c2af15bc-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") " pod="openstack/glance-default-external-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.192756 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23415315-630b-4b47-91ac-ac60c2af15bc-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") " pod="openstack/glance-default-external-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.193076 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.198698 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-combined-ca-bundle\") pod 
\"glance-default-internal-api-0\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.198876 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23415315-630b-4b47-91ac-ac60c2af15bc-config-data\") pod \"glance-default-external-api-0\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") " pod="openstack/glance-default-external-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.207732 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23415315-630b-4b47-91ac-ac60c2af15bc-scripts\") pod \"glance-default-external-api-0\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") " pod="openstack/glance-default-external-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.209511 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ccsld\" (UniqueName: \"kubernetes.io/projected/23415315-630b-4b47-91ac-ac60c2af15bc-kube-api-access-ccsld\") pod \"glance-default-external-api-0\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") " pod="openstack/glance-default-external-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.210641 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jdch\" (UniqueName: \"kubernetes.io/projected/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-kube-api-access-9jdch\") pod \"glance-default-internal-api-0\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.227585 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.255744 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") " pod="openstack/glance-default-external-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.385208 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.400916 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.520131 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-db-sync-8w6c7" event={"ID":"ec5c8613-c88b-4cc5-8ad4-440e65523618","Type":"ContainerStarted","Data":"45225be23011835ca772f604ecb495177b51e6431dce31d4b06618b8ccc1ac64"} Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.542473 4708 generic.go:334] "Generic (PLEG): container finished" podID="dcaf1aa4-0bde-49a7-a027-140450f08736" containerID="efd0cc66c29a29935ef515388230d893b8545aede61375169757b998a1a0416c" exitCode=0 Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.542567 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-cxbc2" event={"ID":"dcaf1aa4-0bde-49a7-a027-140450f08736","Type":"ContainerDied","Data":"efd0cc66c29a29935ef515388230d893b8545aede61375169757b998a1a0416c"} Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.543400 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ironic-db-sync-8w6c7" podStartSLOduration=15.53582307 podStartE2EDuration="22.543377267s" podCreationTimestamp="2026-02-03 07:27:52 +0000 UTC" firstStartedPulling="2026-02-03 07:28:04.84902273 +0000 UTC m=+1063.830969537" lastFinishedPulling="2026-02-03 07:28:11.856576927 +0000 UTC m=+1070.838523734" observedRunningTime="2026-02-03 07:28:14.542470236 +0000 UTC m=+1073.524417043" watchObservedRunningTime="2026-02-03 07:28:14.543377267 +0000 UTC m=+1073.525324074" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.553242 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7ddbc898b8-cqt5j" event={"ID":"11f8a75b-7b47-4838-9751-5a03516154e7","Type":"ContainerStarted","Data":"2d62dea31c4d8da2c31e3a8a0df99f0d4478c5b3d4fbda636f17ee8b8989e1f4"} Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.553282 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7ddbc898b8-cqt5j" event={"ID":"11f8a75b-7b47-4838-9751-5a03516154e7","Type":"ContainerStarted","Data":"32741ea23437f673649e65204c397fa9ff08284016fe209e3d5eb8f20f558ed8"} Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.553458 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-7ddbc898b8-cqt5j" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.563271 4708 generic.go:334] "Generic (PLEG): container finished" podID="1a3122ca-fa36-4033-a44f-36d12d0b3f4a" containerID="740a3fafcc5199dcb4e70c85e69211cf63edc3daa6ddc13939b116f97220af85" exitCode=0 Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.563330 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-2smzn" event={"ID":"1a3122ca-fa36-4033-a44f-36d12d0b3f4a","Type":"ContainerDied","Data":"740a3fafcc5199dcb4e70c85e69211cf63edc3daa6ddc13939b116f97220af85"} Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.599972 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-7ddbc898b8-cqt5j" podStartSLOduration=2.599954323 podStartE2EDuration="2.599954323s" podCreationTimestamp="2026-02-03 07:28:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:28:14.588168834 +0000 UTC m=+1073.570115641" watchObservedRunningTime="2026-02-03 07:28:14.599954323 +0000 UTC m=+1073.581901130" Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.672240 4708 
Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.693113 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb"
Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.774139 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-n7s6l"]
Feb 03 07:28:14 crc kubenswrapper[4708]: I0203 07:28:14.774400 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" podUID="21f50ebb-9d66-4b4b-bde7-b3e0e8057feb" containerName="dnsmasq-dns" containerID="cri-o://3de21f52324ce1921b8558e8eaf43983881da531f4a9bdedd6daf119fa85efff" gracePeriod=10
Feb 03 07:28:15 crc kubenswrapper[4708]: I0203 07:28:15.025932 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Feb 03 07:28:15 crc kubenswrapper[4708]: I0203 07:28:15.239381 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Feb 03 07:28:15 crc kubenswrapper[4708]: W0203 07:28:15.262494 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1f4fde2c_f8ce_4722_93ee_6ddc16b128f7.slice/crio-107e16d6a66b666f0cd66dbe9e26a204b3348d3c17dc9183cbdb94cfea5bd113 WatchSource:0}: Error finding container 107e16d6a66b666f0cd66dbe9e26a204b3348d3c17dc9183cbdb94cfea5bd113: Status 404 returned error can't find the container with id 107e16d6a66b666f0cd66dbe9e26a204b3348d3c17dc9183cbdb94cfea5bd113
Feb 03 07:28:15 crc kubenswrapper[4708]: I0203 07:28:15.647297 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"23415315-630b-4b47-91ac-ac60c2af15bc","Type":"ContainerStarted","Data":"2d1ec41ac80b27f5f753cc114b047e247fe73df3e8a34d7eb9c03c1dfe840c5d"}
Feb 03 07:28:15 crc kubenswrapper[4708]: I0203 07:28:15.653147 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7","Type":"ContainerStarted","Data":"107e16d6a66b666f0cd66dbe9e26a204b3348d3c17dc9183cbdb94cfea5bd113"}
Feb 03 07:28:15 crc kubenswrapper[4708]: I0203 07:28:15.654648 4708 generic.go:334] "Generic (PLEG): container finished" podID="21f50ebb-9d66-4b4b-bde7-b3e0e8057feb" containerID="3de21f52324ce1921b8558e8eaf43983881da531f4a9bdedd6daf119fa85efff" exitCode=0
Feb 03 07:28:15 crc kubenswrapper[4708]: I0203 07:28:15.654687 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" event={"ID":"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb","Type":"ContainerDied","Data":"3de21f52324ce1921b8558e8eaf43983881da531f4a9bdedd6daf119fa85efff"}
Feb 03 07:28:15 crc kubenswrapper[4708]: I0203 07:28:15.658549 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-67599f68dd-cgvwn" event={"ID":"11e8be85-5666-4e3d-8964-b0d554d5b1ef","Type":"ContainerStarted","Data":"8c883bd00dc53d07360b618d78dc3981c2065ab0449c775a551a3da48325a6c3"}
Feb 03 07:28:15 crc kubenswrapper[4708]: I0203 07:28:15.658577 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-67599f68dd-cgvwn" event={"ID":"11e8be85-5666-4e3d-8964-b0d554d5b1ef","Type":"ContainerStarted","Data":"c26eead29d0d60a66735dd818b345c1cfa6095050ccfb420a588c3b4a7fd140c"}
Feb 03 07:28:16 crc kubenswrapper[4708]: I0203
07:28:16.671757 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"23415315-630b-4b47-91ac-ac60c2af15bc","Type":"ContainerStarted","Data":"4949d4af0b3256c66c52e0346ea55d77c3aa7ced56092f943478e2fdd5947463"} Feb 03 07:28:16 crc kubenswrapper[4708]: I0203 07:28:16.674173 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7","Type":"ContainerStarted","Data":"8d3298517ad9b8842840c7bc34dadc402448b09b2a75e3100078876011ed57d2"} Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.418924 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-cxbc2" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.428916 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.429194 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-2smzn" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.527511 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-69v4f\" (UniqueName: \"kubernetes.io/projected/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-kube-api-access-69v4f\") pod \"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb\" (UID: \"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb\") " Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.527901 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-dns-svc\") pod \"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb\" (UID: \"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb\") " Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.527957 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dcaf1aa4-0bde-49a7-a027-140450f08736-config\") pod \"dcaf1aa4-0bde-49a7-a027-140450f08736\" (UID: \"dcaf1aa4-0bde-49a7-a027-140450f08736\") " Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.527975 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-dns-swift-storage-0\") pod \"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb\" (UID: \"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb\") " Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.527994 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-ovsdbserver-nb\") pod \"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb\" (UID: \"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb\") " Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.529237 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcaf1aa4-0bde-49a7-a027-140450f08736-combined-ca-bundle\") pod \"dcaf1aa4-0bde-49a7-a027-140450f08736\" (UID: \"dcaf1aa4-0bde-49a7-a027-140450f08736\") " Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.529290 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-ovsdbserver-sb\") pod 
\"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb\" (UID: \"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb\") " Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.529330 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-config\") pod \"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb\" (UID: \"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb\") " Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.529399 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a3122ca-fa36-4033-a44f-36d12d0b3f4a-combined-ca-bundle\") pod \"1a3122ca-fa36-4033-a44f-36d12d0b3f4a\" (UID: \"1a3122ca-fa36-4033-a44f-36d12d0b3f4a\") " Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.529569 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1a3122ca-fa36-4033-a44f-36d12d0b3f4a-db-sync-config-data\") pod \"1a3122ca-fa36-4033-a44f-36d12d0b3f4a\" (UID: \"1a3122ca-fa36-4033-a44f-36d12d0b3f4a\") " Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.529618 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cg54j\" (UniqueName: \"kubernetes.io/projected/1a3122ca-fa36-4033-a44f-36d12d0b3f4a-kube-api-access-cg54j\") pod \"1a3122ca-fa36-4033-a44f-36d12d0b3f4a\" (UID: \"1a3122ca-fa36-4033-a44f-36d12d0b3f4a\") " Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.529642 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c582k\" (UniqueName: \"kubernetes.io/projected/dcaf1aa4-0bde-49a7-a027-140450f08736-kube-api-access-c582k\") pod \"dcaf1aa4-0bde-49a7-a027-140450f08736\" (UID: \"dcaf1aa4-0bde-49a7-a027-140450f08736\") " Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.536057 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dcaf1aa4-0bde-49a7-a027-140450f08736-kube-api-access-c582k" (OuterVolumeSpecName: "kube-api-access-c582k") pod "dcaf1aa4-0bde-49a7-a027-140450f08736" (UID: "dcaf1aa4-0bde-49a7-a027-140450f08736"). InnerVolumeSpecName "kube-api-access-c582k". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.543192 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-kube-api-access-69v4f" (OuterVolumeSpecName: "kube-api-access-69v4f") pod "21f50ebb-9d66-4b4b-bde7-b3e0e8057feb" (UID: "21f50ebb-9d66-4b4b-bde7-b3e0e8057feb"). InnerVolumeSpecName "kube-api-access-69v4f". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.590473 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a3122ca-fa36-4033-a44f-36d12d0b3f4a-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "1a3122ca-fa36-4033-a44f-36d12d0b3f4a" (UID: "1a3122ca-fa36-4033-a44f-36d12d0b3f4a"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.603613 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a3122ca-fa36-4033-a44f-36d12d0b3f4a-kube-api-access-cg54j" (OuterVolumeSpecName: "kube-api-access-cg54j") pod "1a3122ca-fa36-4033-a44f-36d12d0b3f4a" (UID: "1a3122ca-fa36-4033-a44f-36d12d0b3f4a"). InnerVolumeSpecName "kube-api-access-cg54j". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.633042 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cg54j\" (UniqueName: \"kubernetes.io/projected/1a3122ca-fa36-4033-a44f-36d12d0b3f4a-kube-api-access-cg54j\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.633282 4708 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1a3122ca-fa36-4033-a44f-36d12d0b3f4a-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.633440 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c582k\" (UniqueName: \"kubernetes.io/projected/dcaf1aa4-0bde-49a7-a027-140450f08736-kube-api-access-c582k\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.633542 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-69v4f\" (UniqueName: \"kubernetes.io/projected/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-kube-api-access-69v4f\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.635782 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a3122ca-fa36-4033-a44f-36d12d0b3f4a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1a3122ca-fa36-4033-a44f-36d12d0b3f4a" (UID: "1a3122ca-fa36-4033-a44f-36d12d0b3f4a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.636096 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dcaf1aa4-0bde-49a7-a027-140450f08736-config" (OuterVolumeSpecName: "config") pod "dcaf1aa4-0bde-49a7-a027-140450f08736" (UID: "dcaf1aa4-0bde-49a7-a027-140450f08736"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.670752 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-config" (OuterVolumeSpecName: "config") pod "21f50ebb-9d66-4b4b-bde7-b3e0e8057feb" (UID: "21f50ebb-9d66-4b4b-bde7-b3e0e8057feb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.673291 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dcaf1aa4-0bde-49a7-a027-140450f08736-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dcaf1aa4-0bde-49a7-a027-140450f08736" (UID: "dcaf1aa4-0bde-49a7-a027-140450f08736"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.682567 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "21f50ebb-9d66-4b4b-bde7-b3e0e8057feb" (UID: "21f50ebb-9d66-4b4b-bde7-b3e0e8057feb"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.683362 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "21f50ebb-9d66-4b4b-bde7-b3e0e8057feb" (UID: "21f50ebb-9d66-4b4b-bde7-b3e0e8057feb"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.688119 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "21f50ebb-9d66-4b4b-bde7-b3e0e8057feb" (UID: "21f50ebb-9d66-4b4b-bde7-b3e0e8057feb"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.691849 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "21f50ebb-9d66-4b4b-bde7-b3e0e8057feb" (UID: "21f50ebb-9d66-4b4b-bde7-b3e0e8057feb"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.709862 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-cxbc2" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.709892 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-cxbc2" event={"ID":"dcaf1aa4-0bde-49a7-a027-140450f08736","Type":"ContainerDied","Data":"7e2ebc0528c8f635830dac451865fe849e068fa0b5a21a03f8c2d01a409c8c2d"} Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.709932 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7e2ebc0528c8f635830dac451865fe849e068fa0b5a21a03f8c2d01a409c8c2d" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.713097 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-2smzn" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.713351 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-2smzn" event={"ID":"1a3122ca-fa36-4033-a44f-36d12d0b3f4a","Type":"ContainerDied","Data":"0131735411e070d57ab066ad86f53664b0cbbfe1d4e9e894851a40541ea676b3"} Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.713393 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0131735411e070d57ab066ad86f53664b0cbbfe1d4e9e894851a40541ea676b3" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.715197 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" event={"ID":"21f50ebb-9d66-4b4b-bde7-b3e0e8057feb","Type":"ContainerDied","Data":"9294024126ea6c2ad6b896e3924c5371eefe4b48eec5d8df8c6d802188c39216"} Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.715227 4708 scope.go:117] "RemoveContainer" containerID="3de21f52324ce1921b8558e8eaf43983881da531f4a9bdedd6daf119fa85efff" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.715350 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.731478 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-67599f68dd-cgvwn" event={"ID":"11e8be85-5666-4e3d-8964-b0d554d5b1ef","Type":"ContainerStarted","Data":"893fe9802dfc03dd84899e56edf6d999889bc7835e92ed4a460fce62f89abf08"} Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.732258 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-67599f68dd-cgvwn" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.732647 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-67599f68dd-cgvwn" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.734825 4708 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.734848 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/dcaf1aa4-0bde-49a7-a027-140450f08736-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.734859 4708 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.734870 4708 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.734880 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcaf1aa4-0bde-49a7-a027-140450f08736-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.734887 4708 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:20 crc 
Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.734908 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a3122ca-fa36-4033-a44f-36d12d0b3f4a-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.736446 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/placement-67599f68dd-cgvwn" podUID="11e8be85-5666-4e3d-8964-b0d554d5b1ef" containerName="placement-log" probeResult="failure" output="Get \"https://10.217.0.151:8778/\": dial tcp 10.217.0.151:8778: connect: connection refused"
Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.760428 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-67599f68dd-cgvwn" podStartSLOduration=7.760410713 podStartE2EDuration="7.760410713s" podCreationTimestamp="2026-02-03 07:28:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:28:20.754453457 +0000 UTC m=+1079.736400294" watchObservedRunningTime="2026-02-03 07:28:20.760410713 +0000 UTC m=+1079.742357520"
Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.784944 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-n7s6l"]
Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.789899 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-n7s6l"]
Feb 03 07:28:20 crc kubenswrapper[4708]: I0203 07:28:20.850132 4708 scope.go:117] "RemoveContainer" containerID="791324fb13eabbddd2baf9744b59a7937725c75a27bd96bcd6d8f15d6ca60319"
Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.740161 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7","Type":"ContainerStarted","Data":"cfcd0a24d7512c21f898ada8ee60c205fdc22386975fa6038833b779dca8ea6d"}
Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.743427 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"46305c26-487f-4187-a8ae-48fd2319d25a","Type":"ContainerStarted","Data":"c6fe7d6c4788d0785776e8ad0c91a07b6a3d25835e512034d57e333cdd2d7fa1"}
Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.745751 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"23415315-630b-4b47-91ac-ac60c2af15bc","Type":"ContainerStarted","Data":"5ef0e97126bf1548a5b4cf7e74d0655789449f0fec641413e3e0dfdeeff18b54"}
Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.747967 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-w75bv" event={"ID":"fde8edd5-50e0-4bb0-8701-54e0998444a1","Type":"ContainerStarted","Data":"da8fa13fa14d8037f60141061494219bc255d8cbc09e1c7236ac2f6534d3b1d2"}
Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.763663 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-6cjgm"]
Feb 03 07:28:21 crc kubenswrapper[4708]: E0203 07:28:21.764095 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21f50ebb-9d66-4b4b-bde7-b3e0e8057feb" containerName="dnsmasq-dns"
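The "Probe failed" entry above is kubelet's HTTPS readiness check hitting the placement pod's IP before anything is listening on 8778, so the TCP connect is refused; the pod had only just reported ContainerStarted. A minimal stand-in for such an HTTPS GET probe, which reproduces the same error shape. The endpoint is taken from the log; the timeout and TLS handling are assumptions of this sketch rather than values read from the probe spec, and kubelet treats any 2xx/3xx status as success:

    // probe.go — toy HTTPS readiness check against the logged endpoint.
    package main

    import (
    	"crypto/tls"
    	"fmt"
    	"net/http"
    	"time"
    )

    func ready(url string) error {
    	client := &http.Client{
    		Timeout: 1 * time.Second,
    		// Skip certificate verification purely to keep the sketch
    		// self-contained against a pod-IP serving cert.
    		Transport: &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}},
    	}
    	resp, err := client.Get(url)
    	if err != nil {
    		// e.g. Get "https://10.217.0.151:8778/": ... connect: connection refused
    		return err
    	}
    	defer resp.Body.Close()
    	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
    		return fmt.Errorf("unexpected status %d", resp.StatusCode)
    	}
    	return nil
    }

    func main() {
    	if err := ready("https://10.217.0.151:8778/"); err != nil {
    		fmt.Println("probe failed:", err)
    	}
    }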
Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.764117 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="21f50ebb-9d66-4b4b-bde7-b3e0e8057feb" containerName="dnsmasq-dns"
Feb 03 07:28:21 crc kubenswrapper[4708]: E0203 07:28:21.764156 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a3122ca-fa36-4033-a44f-36d12d0b3f4a" containerName="barbican-db-sync"
Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.764166 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a3122ca-fa36-4033-a44f-36d12d0b3f4a" containerName="barbican-db-sync"
Feb 03 07:28:21 crc kubenswrapper[4708]: E0203 07:28:21.764189 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcaf1aa4-0bde-49a7-a027-140450f08736" containerName="neutron-db-sync"
Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.764198 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcaf1aa4-0bde-49a7-a027-140450f08736" containerName="neutron-db-sync"
Feb 03 07:28:21 crc kubenswrapper[4708]: E0203 07:28:21.764212 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21f50ebb-9d66-4b4b-bde7-b3e0e8057feb" containerName="init"
Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.764221 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="21f50ebb-9d66-4b4b-bde7-b3e0e8057feb" containerName="init"
Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.764414 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="21f50ebb-9d66-4b4b-bde7-b3e0e8057feb" containerName="dnsmasq-dns"
Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.764455 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a3122ca-fa36-4033-a44f-36d12d0b3f4a" containerName="barbican-db-sync"
Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.764472 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcaf1aa4-0bde-49a7-a027-140450f08736" containerName="neutron-db-sync"
Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.765535 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-6cjgm"
Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.780699 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-6cjgm"]
Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.801571 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=8.801552429000001 podStartE2EDuration="8.801552429s" podCreationTimestamp="2026-02-03 07:28:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:28:21.789242628 +0000 UTC m=+1080.771189435" watchObservedRunningTime="2026-02-03 07:28:21.801552429 +0000 UTC m=+1080.783499226"
Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.835774 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-6cdbf88dd5-z8pqs"]
Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.837651 4708 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/barbican-worker-6cdbf88dd5-z8pqs" Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.848355 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-6xzsv" Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.848545 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.848672 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.849913 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-6cjgm\" (UID: \"7787ad08-661d-4148-bf6f-6c0f7b9d4b53\") " pod="openstack/dnsmasq-dns-55f844cf75-6cjgm" Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.849963 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-config\") pod \"dnsmasq-dns-55f844cf75-6cjgm\" (UID: \"7787ad08-661d-4148-bf6f-6c0f7b9d4b53\") " pod="openstack/dnsmasq-dns-55f844cf75-6cjgm" Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.849980 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-dns-svc\") pod \"dnsmasq-dns-55f844cf75-6cjgm\" (UID: \"7787ad08-661d-4148-bf6f-6c0f7b9d4b53\") " pod="openstack/dnsmasq-dns-55f844cf75-6cjgm" Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.850086 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-6cjgm\" (UID: \"7787ad08-661d-4148-bf6f-6c0f7b9d4b53\") " pod="openstack/dnsmasq-dns-55f844cf75-6cjgm" Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.850119 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-6cjgm\" (UID: \"7787ad08-661d-4148-bf6f-6c0f7b9d4b53\") " pod="openstack/dnsmasq-dns-55f844cf75-6cjgm" Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.850167 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhg8f\" (UniqueName: \"kubernetes.io/projected/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-kube-api-access-bhg8f\") pod \"dnsmasq-dns-55f844cf75-6cjgm\" (UID: \"7787ad08-661d-4148-bf6f-6c0f7b9d4b53\") " pod="openstack/dnsmasq-dns-55f844cf75-6cjgm" Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.870861 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6cdbf88dd5-z8pqs"] Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.872716 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=8.872704881 podStartE2EDuration="8.872704881s" podCreationTimestamp="2026-02-03 07:28:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:28:21.849101723 +0000 UTC m=+1080.831048530" watchObservedRunningTime="2026-02-03 07:28:21.872704881 +0000 UTC m=+1080.854651688" Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.912018 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-db7889686-b8tst"] Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.913425 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-db7889686-b8tst" Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.922137 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.930925 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-fb89f7cf6-7s2lp"] Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.932391 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-fb89f7cf6-7s2lp" Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.938134 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.938547 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.938999 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-nb4t6" Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.938997 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.991673 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhg8f\" (UniqueName: \"kubernetes.io/projected/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-kube-api-access-bhg8f\") pod \"dnsmasq-dns-55f844cf75-6cjgm\" (UID: \"7787ad08-661d-4148-bf6f-6c0f7b9d4b53\") " pod="openstack/dnsmasq-dns-55f844cf75-6cjgm" Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.991742 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef60526c-751e-464d-a1c1-a50e343093b7-ovndb-tls-certs\") pod \"neutron-fb89f7cf6-7s2lp\" (UID: \"ef60526c-751e-464d-a1c1-a50e343093b7\") " pod="openstack/neutron-fb89f7cf6-7s2lp" Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.991781 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ef60526c-751e-464d-a1c1-a50e343093b7-config\") pod \"neutron-fb89f7cf6-7s2lp\" (UID: \"ef60526c-751e-464d-a1c1-a50e343093b7\") " pod="openstack/neutron-fb89f7cf6-7s2lp" Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.992014 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rxl4\" (UniqueName: \"kubernetes.io/projected/ef60526c-751e-464d-a1c1-a50e343093b7-kube-api-access-5rxl4\") pod \"neutron-fb89f7cf6-7s2lp\" (UID: \"ef60526c-751e-464d-a1c1-a50e343093b7\") " pod="openstack/neutron-fb89f7cf6-7s2lp" Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.992064 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" 
(UniqueName: \"kubernetes.io/secret/ef60526c-751e-464d-a1c1-a50e343093b7-httpd-config\") pod \"neutron-fb89f7cf6-7s2lp\" (UID: \"ef60526c-751e-464d-a1c1-a50e343093b7\") " pod="openstack/neutron-fb89f7cf6-7s2lp" Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.992111 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-6cjgm\" (UID: \"7787ad08-661d-4148-bf6f-6c0f7b9d4b53\") " pod="openstack/dnsmasq-dns-55f844cf75-6cjgm" Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.992145 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef60526c-751e-464d-a1c1-a50e343093b7-combined-ca-bundle\") pod \"neutron-fb89f7cf6-7s2lp\" (UID: \"ef60526c-751e-464d-a1c1-a50e343093b7\") " pod="openstack/neutron-fb89f7cf6-7s2lp" Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.992173 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-config\") pod \"dnsmasq-dns-55f844cf75-6cjgm\" (UID: \"7787ad08-661d-4148-bf6f-6c0f7b9d4b53\") " pod="openstack/dnsmasq-dns-55f844cf75-6cjgm" Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.992205 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-dns-svc\") pod \"dnsmasq-dns-55f844cf75-6cjgm\" (UID: \"7787ad08-661d-4148-bf6f-6c0f7b9d4b53\") " pod="openstack/dnsmasq-dns-55f844cf75-6cjgm" Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.992435 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-6cjgm\" (UID: \"7787ad08-661d-4148-bf6f-6c0f7b9d4b53\") " pod="openstack/dnsmasq-dns-55f844cf75-6cjgm" Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.992499 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-6cjgm\" (UID: \"7787ad08-661d-4148-bf6f-6c0f7b9d4b53\") " pod="openstack/dnsmasq-dns-55f844cf75-6cjgm" Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.993981 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-6cjgm\" (UID: \"7787ad08-661d-4148-bf6f-6c0f7b9d4b53\") " pod="openstack/dnsmasq-dns-55f844cf75-6cjgm" Feb 03 07:28:21 crc kubenswrapper[4708]: I0203 07:28:21.998324 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-6cjgm\" (UID: \"7787ad08-661d-4148-bf6f-6c0f7b9d4b53\") " pod="openstack/dnsmasq-dns-55f844cf75-6cjgm" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:21.999636 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-dns-svc\") pod 
\"dnsmasq-dns-55f844cf75-6cjgm\" (UID: \"7787ad08-661d-4148-bf6f-6c0f7b9d4b53\") " pod="openstack/dnsmasq-dns-55f844cf75-6cjgm" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.000087 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-6cjgm\" (UID: \"7787ad08-661d-4148-bf6f-6c0f7b9d4b53\") " pod="openstack/dnsmasq-dns-55f844cf75-6cjgm" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.001955 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-config\") pod \"dnsmasq-dns-55f844cf75-6cjgm\" (UID: \"7787ad08-661d-4148-bf6f-6c0f7b9d4b53\") " pod="openstack/dnsmasq-dns-55f844cf75-6cjgm" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.046942 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-db7889686-b8tst"] Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.062501 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhg8f\" (UniqueName: \"kubernetes.io/projected/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-kube-api-access-bhg8f\") pod \"dnsmasq-dns-55f844cf75-6cjgm\" (UID: \"7787ad08-661d-4148-bf6f-6c0f7b9d4b53\") " pod="openstack/dnsmasq-dns-55f844cf75-6cjgm" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.094019 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-fb89f7cf6-7s2lp"] Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.103944 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/00f8a942-b096-49d1-b020-c1aa13eb42c4-config-data-custom\") pod \"barbican-worker-6cdbf88dd5-z8pqs\" (UID: \"00f8a942-b096-49d1-b020-c1aa13eb42c4\") " pod="openstack/barbican-worker-6cdbf88dd5-z8pqs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.104072 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bblqx\" (UniqueName: \"kubernetes.io/projected/00f8a942-b096-49d1-b020-c1aa13eb42c4-kube-api-access-bblqx\") pod \"barbican-worker-6cdbf88dd5-z8pqs\" (UID: \"00f8a942-b096-49d1-b020-c1aa13eb42c4\") " pod="openstack/barbican-worker-6cdbf88dd5-z8pqs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.104134 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef60526c-751e-464d-a1c1-a50e343093b7-ovndb-tls-certs\") pod \"neutron-fb89f7cf6-7s2lp\" (UID: \"ef60526c-751e-464d-a1c1-a50e343093b7\") " pod="openstack/neutron-fb89f7cf6-7s2lp" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.104190 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ef60526c-751e-464d-a1c1-a50e343093b7-config\") pod \"neutron-fb89f7cf6-7s2lp\" (UID: \"ef60526c-751e-464d-a1c1-a50e343093b7\") " pod="openstack/neutron-fb89f7cf6-7s2lp" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.104209 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4e40bc4a-7f69-410e-b310-4cb12a8a7f58-config-data-custom\") pod \"barbican-keystone-listener-db7889686-b8tst\" (UID: 
\"4e40bc4a-7f69-410e-b310-4cb12a8a7f58\") " pod="openstack/barbican-keystone-listener-db7889686-b8tst" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.104261 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00f8a942-b096-49d1-b020-c1aa13eb42c4-config-data\") pod \"barbican-worker-6cdbf88dd5-z8pqs\" (UID: \"00f8a942-b096-49d1-b020-c1aa13eb42c4\") " pod="openstack/barbican-worker-6cdbf88dd5-z8pqs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.104395 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wjxz\" (UniqueName: \"kubernetes.io/projected/4e40bc4a-7f69-410e-b310-4cb12a8a7f58-kube-api-access-4wjxz\") pod \"barbican-keystone-listener-db7889686-b8tst\" (UID: \"4e40bc4a-7f69-410e-b310-4cb12a8a7f58\") " pod="openstack/barbican-keystone-listener-db7889686-b8tst" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.104446 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rxl4\" (UniqueName: \"kubernetes.io/projected/ef60526c-751e-464d-a1c1-a50e343093b7-kube-api-access-5rxl4\") pod \"neutron-fb89f7cf6-7s2lp\" (UID: \"ef60526c-751e-464d-a1c1-a50e343093b7\") " pod="openstack/neutron-fb89f7cf6-7s2lp" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.104464 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e40bc4a-7f69-410e-b310-4cb12a8a7f58-combined-ca-bundle\") pod \"barbican-keystone-listener-db7889686-b8tst\" (UID: \"4e40bc4a-7f69-410e-b310-4cb12a8a7f58\") " pod="openstack/barbican-keystone-listener-db7889686-b8tst" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.104497 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ef60526c-751e-464d-a1c1-a50e343093b7-httpd-config\") pod \"neutron-fb89f7cf6-7s2lp\" (UID: \"ef60526c-751e-464d-a1c1-a50e343093b7\") " pod="openstack/neutron-fb89f7cf6-7s2lp" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.104532 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef60526c-751e-464d-a1c1-a50e343093b7-combined-ca-bundle\") pod \"neutron-fb89f7cf6-7s2lp\" (UID: \"ef60526c-751e-464d-a1c1-a50e343093b7\") " pod="openstack/neutron-fb89f7cf6-7s2lp" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.104586 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00f8a942-b096-49d1-b020-c1aa13eb42c4-combined-ca-bundle\") pod \"barbican-worker-6cdbf88dd5-z8pqs\" (UID: \"00f8a942-b096-49d1-b020-c1aa13eb42c4\") " pod="openstack/barbican-worker-6cdbf88dd5-z8pqs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.116493 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-6cjgm" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.117640 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/00f8a942-b096-49d1-b020-c1aa13eb42c4-logs\") pod \"barbican-worker-6cdbf88dd5-z8pqs\" (UID: \"00f8a942-b096-49d1-b020-c1aa13eb42c4\") " pod="openstack/barbican-worker-6cdbf88dd5-z8pqs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.140940 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e40bc4a-7f69-410e-b310-4cb12a8a7f58-config-data\") pod \"barbican-keystone-listener-db7889686-b8tst\" (UID: \"4e40bc4a-7f69-410e-b310-4cb12a8a7f58\") " pod="openstack/barbican-keystone-listener-db7889686-b8tst" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.141239 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e40bc4a-7f69-410e-b310-4cb12a8a7f58-logs\") pod \"barbican-keystone-listener-db7889686-b8tst\" (UID: \"4e40bc4a-7f69-410e-b310-4cb12a8a7f58\") " pod="openstack/barbican-keystone-listener-db7889686-b8tst" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.134636 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ef60526c-751e-464d-a1c1-a50e343093b7-httpd-config\") pod \"neutron-fb89f7cf6-7s2lp\" (UID: \"ef60526c-751e-464d-a1c1-a50e343093b7\") " pod="openstack/neutron-fb89f7cf6-7s2lp" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.121168 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.129690 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-w75bv" podStartSLOduration=3.447376495 podStartE2EDuration="45.129672531s" podCreationTimestamp="2026-02-03 07:27:37 +0000 UTC" firstStartedPulling="2026-02-03 07:27:38.947701385 +0000 UTC m=+1037.929648182" lastFinishedPulling="2026-02-03 07:28:20.629997411 +0000 UTC m=+1079.611944218" observedRunningTime="2026-02-03 07:28:21.915679973 +0000 UTC m=+1080.897626780" watchObservedRunningTime="2026-02-03 07:28:22.129672531 +0000 UTC m=+1081.111619338" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.121215 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.129100 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.149727 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef60526c-751e-464d-a1c1-a50e343093b7-ovndb-tls-certs\") pod \"neutron-fb89f7cf6-7s2lp\" (UID: \"ef60526c-751e-464d-a1c1-a50e343093b7\") " pod="openstack/neutron-fb89f7cf6-7s2lp" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.184976 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef60526c-751e-464d-a1c1-a50e343093b7-combined-ca-bundle\") pod \"neutron-fb89f7cf6-7s2lp\" (UID: \"ef60526c-751e-464d-a1c1-a50e343093b7\") " pod="openstack/neutron-fb89f7cf6-7s2lp" Feb 03 07:28:22 crc 
kubenswrapper[4708]: I0203 07:28:22.185483 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rxl4\" (UniqueName: \"kubernetes.io/projected/ef60526c-751e-464d-a1c1-a50e343093b7-kube-api-access-5rxl4\") pod \"neutron-fb89f7cf6-7s2lp\" (UID: \"ef60526c-751e-464d-a1c1-a50e343093b7\") " pod="openstack/neutron-fb89f7cf6-7s2lp" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.193751 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/ef60526c-751e-464d-a1c1-a50e343093b7-config\") pod \"neutron-fb89f7cf6-7s2lp\" (UID: \"ef60526c-751e-464d-a1c1-a50e343093b7\") " pod="openstack/neutron-fb89f7cf6-7s2lp" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.221605 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21f50ebb-9d66-4b4b-bde7-b3e0e8057feb" path="/var/lib/kubelet/pods/21f50ebb-9d66-4b4b-bde7-b3e0e8057feb/volumes" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.243272 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/00f8a942-b096-49d1-b020-c1aa13eb42c4-config-data-custom\") pod \"barbican-worker-6cdbf88dd5-z8pqs\" (UID: \"00f8a942-b096-49d1-b020-c1aa13eb42c4\") " pod="openstack/barbican-worker-6cdbf88dd5-z8pqs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.243551 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bblqx\" (UniqueName: \"kubernetes.io/projected/00f8a942-b096-49d1-b020-c1aa13eb42c4-kube-api-access-bblqx\") pod \"barbican-worker-6cdbf88dd5-z8pqs\" (UID: \"00f8a942-b096-49d1-b020-c1aa13eb42c4\") " pod="openstack/barbican-worker-6cdbf88dd5-z8pqs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.243583 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4e40bc4a-7f69-410e-b310-4cb12a8a7f58-config-data-custom\") pod \"barbican-keystone-listener-db7889686-b8tst\" (UID: \"4e40bc4a-7f69-410e-b310-4cb12a8a7f58\") " pod="openstack/barbican-keystone-listener-db7889686-b8tst" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.243610 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00f8a942-b096-49d1-b020-c1aa13eb42c4-config-data\") pod \"barbican-worker-6cdbf88dd5-z8pqs\" (UID: \"00f8a942-b096-49d1-b020-c1aa13eb42c4\") " pod="openstack/barbican-worker-6cdbf88dd5-z8pqs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.243645 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wjxz\" (UniqueName: \"kubernetes.io/projected/4e40bc4a-7f69-410e-b310-4cb12a8a7f58-kube-api-access-4wjxz\") pod \"barbican-keystone-listener-db7889686-b8tst\" (UID: \"4e40bc4a-7f69-410e-b310-4cb12a8a7f58\") " pod="openstack/barbican-keystone-listener-db7889686-b8tst" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.243664 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e40bc4a-7f69-410e-b310-4cb12a8a7f58-combined-ca-bundle\") pod \"barbican-keystone-listener-db7889686-b8tst\" (UID: \"4e40bc4a-7f69-410e-b310-4cb12a8a7f58\") " pod="openstack/barbican-keystone-listener-db7889686-b8tst" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.243692 4708 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00f8a942-b096-49d1-b020-c1aa13eb42c4-combined-ca-bundle\") pod \"barbican-worker-6cdbf88dd5-z8pqs\" (UID: \"00f8a942-b096-49d1-b020-c1aa13eb42c4\") " pod="openstack/barbican-worker-6cdbf88dd5-z8pqs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.243717 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/00f8a942-b096-49d1-b020-c1aa13eb42c4-logs\") pod \"barbican-worker-6cdbf88dd5-z8pqs\" (UID: \"00f8a942-b096-49d1-b020-c1aa13eb42c4\") " pod="openstack/barbican-worker-6cdbf88dd5-z8pqs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.243743 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e40bc4a-7f69-410e-b310-4cb12a8a7f58-config-data\") pod \"barbican-keystone-listener-db7889686-b8tst\" (UID: \"4e40bc4a-7f69-410e-b310-4cb12a8a7f58\") " pod="openstack/barbican-keystone-listener-db7889686-b8tst" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.243762 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e40bc4a-7f69-410e-b310-4cb12a8a7f58-logs\") pod \"barbican-keystone-listener-db7889686-b8tst\" (UID: \"4e40bc4a-7f69-410e-b310-4cb12a8a7f58\") " pod="openstack/barbican-keystone-listener-db7889686-b8tst" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.250072 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e40bc4a-7f69-410e-b310-4cb12a8a7f58-logs\") pod \"barbican-keystone-listener-db7889686-b8tst\" (UID: \"4e40bc4a-7f69-410e-b310-4cb12a8a7f58\") " pod="openstack/barbican-keystone-listener-db7889686-b8tst" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.250150 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/00f8a942-b096-49d1-b020-c1aa13eb42c4-logs\") pod \"barbican-worker-6cdbf88dd5-z8pqs\" (UID: \"00f8a942-b096-49d1-b020-c1aa13eb42c4\") " pod="openstack/barbican-worker-6cdbf88dd5-z8pqs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.251755 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.251951 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.259285 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-6cjgm"] Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.259977 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.273034 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5dd9656794-5cgwc"] Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.274904 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5dd9656794-5cgwc" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.276718 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.303539 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-nb4t6" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.309767 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5dd9656794-5cgwc"] Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.310952 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-fb89f7cf6-7s2lp" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.324325 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e40bc4a-7f69-410e-b310-4cb12a8a7f58-combined-ca-bundle\") pod \"barbican-keystone-listener-db7889686-b8tst\" (UID: \"4e40bc4a-7f69-410e-b310-4cb12a8a7f58\") " pod="openstack/barbican-keystone-listener-db7889686-b8tst" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.324396 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00f8a942-b096-49d1-b020-c1aa13eb42c4-combined-ca-bundle\") pod \"barbican-worker-6cdbf88dd5-z8pqs\" (UID: \"00f8a942-b096-49d1-b020-c1aa13eb42c4\") " pod="openstack/barbican-worker-6cdbf88dd5-z8pqs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.340878 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-mmcjs"] Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.342610 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-mmcjs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.344204 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wjxz\" (UniqueName: \"kubernetes.io/projected/4e40bc4a-7f69-410e-b310-4cb12a8a7f58-kube-api-access-4wjxz\") pod \"barbican-keystone-listener-db7889686-b8tst\" (UID: \"4e40bc4a-7f69-410e-b310-4cb12a8a7f58\") " pod="openstack/barbican-keystone-listener-db7889686-b8tst" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.344673 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9dnn\" (UniqueName: \"kubernetes.io/projected/9492f917-fe6a-4076-a2f3-7d43ebee25e0-kube-api-access-h9dnn\") pod \"barbican-api-5dd9656794-5cgwc\" (UID: \"9492f917-fe6a-4076-a2f3-7d43ebee25e0\") " pod="openstack/barbican-api-5dd9656794-5cgwc" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.344746 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9492f917-fe6a-4076-a2f3-7d43ebee25e0-config-data\") pod \"barbican-api-5dd9656794-5cgwc\" (UID: \"9492f917-fe6a-4076-a2f3-7d43ebee25e0\") " pod="openstack/barbican-api-5dd9656794-5cgwc" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.344766 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9492f917-fe6a-4076-a2f3-7d43ebee25e0-logs\") pod \"barbican-api-5dd9656794-5cgwc\" (UID: \"9492f917-fe6a-4076-a2f3-7d43ebee25e0\") " pod="openstack/barbican-api-5dd9656794-5cgwc" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.344812 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9492f917-fe6a-4076-a2f3-7d43ebee25e0-combined-ca-bundle\") pod \"barbican-api-5dd9656794-5cgwc\" (UID: \"9492f917-fe6a-4076-a2f3-7d43ebee25e0\") " pod="openstack/barbican-api-5dd9656794-5cgwc" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.345075 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9492f917-fe6a-4076-a2f3-7d43ebee25e0-config-data-custom\") pod \"barbican-api-5dd9656794-5cgwc\" (UID: \"9492f917-fe6a-4076-a2f3-7d43ebee25e0\") " pod="openstack/barbican-api-5dd9656794-5cgwc" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.346848 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bblqx\" (UniqueName: \"kubernetes.io/projected/00f8a942-b096-49d1-b020-c1aa13eb42c4-kube-api-access-bblqx\") pod \"barbican-worker-6cdbf88dd5-z8pqs\" (UID: \"00f8a942-b096-49d1-b020-c1aa13eb42c4\") " pod="openstack/barbican-worker-6cdbf88dd5-z8pqs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.350876 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e40bc4a-7f69-410e-b310-4cb12a8a7f58-config-data\") pod \"barbican-keystone-listener-db7889686-b8tst\" (UID: \"4e40bc4a-7f69-410e-b310-4cb12a8a7f58\") " pod="openstack/barbican-keystone-listener-db7889686-b8tst" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.354187 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/00f8a942-b096-49d1-b020-c1aa13eb42c4-config-data-custom\") pod \"barbican-worker-6cdbf88dd5-z8pqs\" (UID: \"00f8a942-b096-49d1-b020-c1aa13eb42c4\") " pod="openstack/barbican-worker-6cdbf88dd5-z8pqs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.354837 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4e40bc4a-7f69-410e-b310-4cb12a8a7f58-config-data-custom\") pod \"barbican-keystone-listener-db7889686-b8tst\" (UID: \"4e40bc4a-7f69-410e-b310-4cb12a8a7f58\") " pod="openstack/barbican-keystone-listener-db7889686-b8tst" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.377373 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00f8a942-b096-49d1-b020-c1aa13eb42c4-config-data\") pod \"barbican-worker-6cdbf88dd5-z8pqs\" (UID: \"00f8a942-b096-49d1-b020-c1aa13eb42c4\") " pod="openstack/barbican-worker-6cdbf88dd5-z8pqs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.403739 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-mmcjs"] Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.447723 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-mmcjs\" (UID: \"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34\") " pod="openstack/dnsmasq-dns-85ff748b95-mmcjs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.447773 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-dns-svc\") pod \"dnsmasq-dns-85ff748b95-mmcjs\" (UID: \"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34\") " pod="openstack/dnsmasq-dns-85ff748b95-mmcjs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.447846 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s746k\" (UniqueName: \"kubernetes.io/projected/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-kube-api-access-s746k\") pod \"dnsmasq-dns-85ff748b95-mmcjs\" (UID: \"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34\") " pod="openstack/dnsmasq-dns-85ff748b95-mmcjs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.447865 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-config\") pod \"dnsmasq-dns-85ff748b95-mmcjs\" (UID: \"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34\") " pod="openstack/dnsmasq-dns-85ff748b95-mmcjs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.447896 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9492f917-fe6a-4076-a2f3-7d43ebee25e0-config-data-custom\") pod \"barbican-api-5dd9656794-5cgwc\" (UID: \"9492f917-fe6a-4076-a2f3-7d43ebee25e0\") " pod="openstack/barbican-api-5dd9656794-5cgwc" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.447913 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-mmcjs\" (UID: \"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34\") " 
pod="openstack/dnsmasq-dns-85ff748b95-mmcjs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.447954 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9dnn\" (UniqueName: \"kubernetes.io/projected/9492f917-fe6a-4076-a2f3-7d43ebee25e0-kube-api-access-h9dnn\") pod \"barbican-api-5dd9656794-5cgwc\" (UID: \"9492f917-fe6a-4076-a2f3-7d43ebee25e0\") " pod="openstack/barbican-api-5dd9656794-5cgwc" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.447975 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-mmcjs\" (UID: \"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34\") " pod="openstack/dnsmasq-dns-85ff748b95-mmcjs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.448007 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9492f917-fe6a-4076-a2f3-7d43ebee25e0-logs\") pod \"barbican-api-5dd9656794-5cgwc\" (UID: \"9492f917-fe6a-4076-a2f3-7d43ebee25e0\") " pod="openstack/barbican-api-5dd9656794-5cgwc" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.448024 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9492f917-fe6a-4076-a2f3-7d43ebee25e0-config-data\") pod \"barbican-api-5dd9656794-5cgwc\" (UID: \"9492f917-fe6a-4076-a2f3-7d43ebee25e0\") " pod="openstack/barbican-api-5dd9656794-5cgwc" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.448054 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9492f917-fe6a-4076-a2f3-7d43ebee25e0-combined-ca-bundle\") pod \"barbican-api-5dd9656794-5cgwc\" (UID: \"9492f917-fe6a-4076-a2f3-7d43ebee25e0\") " pod="openstack/barbican-api-5dd9656794-5cgwc" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.456951 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9492f917-fe6a-4076-a2f3-7d43ebee25e0-logs\") pod \"barbican-api-5dd9656794-5cgwc\" (UID: \"9492f917-fe6a-4076-a2f3-7d43ebee25e0\") " pod="openstack/barbican-api-5dd9656794-5cgwc" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.457939 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9492f917-fe6a-4076-a2f3-7d43ebee25e0-combined-ca-bundle\") pod \"barbican-api-5dd9656794-5cgwc\" (UID: \"9492f917-fe6a-4076-a2f3-7d43ebee25e0\") " pod="openstack/barbican-api-5dd9656794-5cgwc" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.465246 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9492f917-fe6a-4076-a2f3-7d43ebee25e0-config-data-custom\") pod \"barbican-api-5dd9656794-5cgwc\" (UID: \"9492f917-fe6a-4076-a2f3-7d43ebee25e0\") " pod="openstack/barbican-api-5dd9656794-5cgwc" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.476873 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9492f917-fe6a-4076-a2f3-7d43ebee25e0-config-data\") pod \"barbican-api-5dd9656794-5cgwc\" (UID: \"9492f917-fe6a-4076-a2f3-7d43ebee25e0\") " pod="openstack/barbican-api-5dd9656794-5cgwc" Feb 03 07:28:22 crc 
kubenswrapper[4708]: I0203 07:28:22.482563 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9dnn\" (UniqueName: \"kubernetes.io/projected/9492f917-fe6a-4076-a2f3-7d43ebee25e0-kube-api-access-h9dnn\") pod \"barbican-api-5dd9656794-5cgwc\" (UID: \"9492f917-fe6a-4076-a2f3-7d43ebee25e0\") " pod="openstack/barbican-api-5dd9656794-5cgwc" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.490389 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-6xzsv" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.499932 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6cdbf88dd5-z8pqs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.539600 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5dd9656794-5cgwc" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.549888 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-mmcjs\" (UID: \"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34\") " pod="openstack/dnsmasq-dns-85ff748b95-mmcjs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.549936 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-dns-svc\") pod \"dnsmasq-dns-85ff748b95-mmcjs\" (UID: \"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34\") " pod="openstack/dnsmasq-dns-85ff748b95-mmcjs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.550020 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-config\") pod \"dnsmasq-dns-85ff748b95-mmcjs\" (UID: \"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34\") " pod="openstack/dnsmasq-dns-85ff748b95-mmcjs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.550039 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s746k\" (UniqueName: \"kubernetes.io/projected/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-kube-api-access-s746k\") pod \"dnsmasq-dns-85ff748b95-mmcjs\" (UID: \"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34\") " pod="openstack/dnsmasq-dns-85ff748b95-mmcjs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.550067 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-mmcjs\" (UID: \"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34\") " pod="openstack/dnsmasq-dns-85ff748b95-mmcjs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.550128 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-mmcjs\" (UID: \"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34\") " pod="openstack/dnsmasq-dns-85ff748b95-mmcjs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.551120 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-mmcjs\" 
(UID: \"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34\") " pod="openstack/dnsmasq-dns-85ff748b95-mmcjs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.551217 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-mmcjs\" (UID: \"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34\") " pod="openstack/dnsmasq-dns-85ff748b95-mmcjs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.551906 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-config\") pod \"dnsmasq-dns-85ff748b95-mmcjs\" (UID: \"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34\") " pod="openstack/dnsmasq-dns-85ff748b95-mmcjs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.552551 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-dns-svc\") pod \"dnsmasq-dns-85ff748b95-mmcjs\" (UID: \"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34\") " pod="openstack/dnsmasq-dns-85ff748b95-mmcjs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.560294 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-db7889686-b8tst" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.577376 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-mmcjs\" (UID: \"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34\") " pod="openstack/dnsmasq-dns-85ff748b95-mmcjs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.579882 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s746k\" (UniqueName: \"kubernetes.io/projected/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-kube-api-access-s746k\") pod \"dnsmasq-dns-85ff748b95-mmcjs\" (UID: \"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34\") " pod="openstack/dnsmasq-dns-85ff748b95-mmcjs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.798967 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-6cjgm"] Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.862704 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-mmcjs" Feb 03 07:28:22 crc kubenswrapper[4708]: I0203 07:28:22.935622 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-58dd9ff6bc-n7s6l" podUID="21f50ebb-9d66-4b4b-bde7-b3e0e8057feb" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.144:5353: i/o timeout" Feb 03 07:28:22 crc kubenswrapper[4708]: W0203 07:28:22.981914 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7787ad08_661d_4148_bf6f_6c0f7b9d4b53.slice/crio-0a8ade3a270e110110d6fe39dea1e071e54d994ee9490643b1db641d685e188e WatchSource:0}: Error finding container 0a8ade3a270e110110d6fe39dea1e071e54d994ee9490643b1db641d685e188e: Status 404 returned error can't find the container with id 0a8ade3a270e110110d6fe39dea1e071e54d994ee9490643b1db641d685e188e Feb 03 07:28:23 crc kubenswrapper[4708]: I0203 07:28:23.063617 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-fb89f7cf6-7s2lp"] Feb 03 07:28:23 crc kubenswrapper[4708]: I0203 07:28:23.689573 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6cdbf88dd5-z8pqs"] Feb 03 07:28:23 crc kubenswrapper[4708]: I0203 07:28:23.760786 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-db7889686-b8tst"] Feb 03 07:28:23 crc kubenswrapper[4708]: W0203 07:28:23.779500 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4e40bc4a_7f69_410e_b310_4cb12a8a7f58.slice/crio-90b25a2f903dade9ce56edac8b9674b6bb5c84e7de81f232f08333b848e289be WatchSource:0}: Error finding container 90b25a2f903dade9ce56edac8b9674b6bb5c84e7de81f232f08333b848e289be: Status 404 returned error can't find the container with id 90b25a2f903dade9ce56edac8b9674b6bb5c84e7de81f232f08333b848e289be Feb 03 07:28:23 crc kubenswrapper[4708]: I0203 07:28:23.780424 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-67599f68dd-cgvwn" Feb 03 07:28:23 crc kubenswrapper[4708]: I0203 07:28:23.804740 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6cdbf88dd5-z8pqs" event={"ID":"00f8a942-b096-49d1-b020-c1aa13eb42c4","Type":"ContainerStarted","Data":"3af208474dad95a84bcecff61e1b0f0c27ca9fb282feb56c24476fdf81aac5eb"} Feb 03 07:28:23 crc kubenswrapper[4708]: I0203 07:28:23.813048 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-fb89f7cf6-7s2lp" event={"ID":"ef60526c-751e-464d-a1c1-a50e343093b7","Type":"ContainerStarted","Data":"b7fd16193d5b8bbe074e4c28feac1cdf29d65aa2feadce3b8badb1eb8df6afa4"} Feb 03 07:28:23 crc kubenswrapper[4708]: I0203 07:28:23.813105 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-fb89f7cf6-7s2lp" event={"ID":"ef60526c-751e-464d-a1c1-a50e343093b7","Type":"ContainerStarted","Data":"9f23619057a460ed30640c807f9c9ea0e3e77a9a0bb04774c02f9c7ffb2683c0"} Feb 03 07:28:23 crc kubenswrapper[4708]: I0203 07:28:23.816105 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-6cjgm" event={"ID":"7787ad08-661d-4148-bf6f-6c0f7b9d4b53","Type":"ContainerStarted","Data":"a8641f016ac041db7a55fb2cc1470adc6e6c578c46abf100c869e75a78cfbb4f"} Feb 03 07:28:23 crc kubenswrapper[4708]: I0203 07:28:23.816140 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-55f844cf75-6cjgm" event={"ID":"7787ad08-661d-4148-bf6f-6c0f7b9d4b53","Type":"ContainerStarted","Data":"0a8ade3a270e110110d6fe39dea1e071e54d994ee9490643b1db641d685e188e"} Feb 03 07:28:24 crc kubenswrapper[4708]: I0203 07:28:24.152558 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-6cjgm" Feb 03 07:28:24 crc kubenswrapper[4708]: I0203 07:28:24.207619 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-mmcjs"] Feb 03 07:28:24 crc kubenswrapper[4708]: W0203 07:28:24.226849 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod68c2cee4_5f7b_4d1b_b47c_29e20c5a2d34.slice/crio-354304992c1ed1d5027970f7492bd85ba24c3c488fb1bfa0c2be5b2cb276a35f WatchSource:0}: Error finding container 354304992c1ed1d5027970f7492bd85ba24c3c488fb1bfa0c2be5b2cb276a35f: Status 404 returned error can't find the container with id 354304992c1ed1d5027970f7492bd85ba24c3c488fb1bfa0c2be5b2cb276a35f Feb 03 07:28:24 crc kubenswrapper[4708]: I0203 07:28:24.233084 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5dd9656794-5cgwc"] Feb 03 07:28:24 crc kubenswrapper[4708]: W0203 07:28:24.246936 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9492f917_fe6a_4076_a2f3_7d43ebee25e0.slice/crio-6d21445b62859abebcb8f004314d9758e6343e098ca69100bbcfb759e205be5d WatchSource:0}: Error finding container 6d21445b62859abebcb8f004314d9758e6343e098ca69100bbcfb759e205be5d: Status 404 returned error can't find the container with id 6d21445b62859abebcb8f004314d9758e6343e098ca69100bbcfb759e205be5d Feb 03 07:28:24 crc kubenswrapper[4708]: I0203 07:28:24.315735 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-ovsdbserver-sb\") pod \"7787ad08-661d-4148-bf6f-6c0f7b9d4b53\" (UID: \"7787ad08-661d-4148-bf6f-6c0f7b9d4b53\") " Feb 03 07:28:24 crc kubenswrapper[4708]: I0203 07:28:24.316117 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-ovsdbserver-nb\") pod \"7787ad08-661d-4148-bf6f-6c0f7b9d4b53\" (UID: \"7787ad08-661d-4148-bf6f-6c0f7b9d4b53\") " Feb 03 07:28:24 crc kubenswrapper[4708]: I0203 07:28:24.318437 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-dns-swift-storage-0\") pod \"7787ad08-661d-4148-bf6f-6c0f7b9d4b53\" (UID: \"7787ad08-661d-4148-bf6f-6c0f7b9d4b53\") " Feb 03 07:28:24 crc kubenswrapper[4708]: I0203 07:28:24.318541 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bhg8f\" (UniqueName: \"kubernetes.io/projected/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-kube-api-access-bhg8f\") pod \"7787ad08-661d-4148-bf6f-6c0f7b9d4b53\" (UID: \"7787ad08-661d-4148-bf6f-6c0f7b9d4b53\") " Feb 03 07:28:24 crc kubenswrapper[4708]: I0203 07:28:24.318592 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-dns-svc\") pod \"7787ad08-661d-4148-bf6f-6c0f7b9d4b53\" (UID: 
\"7787ad08-661d-4148-bf6f-6c0f7b9d4b53\") " Feb 03 07:28:24 crc kubenswrapper[4708]: I0203 07:28:24.318647 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-config\") pod \"7787ad08-661d-4148-bf6f-6c0f7b9d4b53\" (UID: \"7787ad08-661d-4148-bf6f-6c0f7b9d4b53\") " Feb 03 07:28:24 crc kubenswrapper[4708]: I0203 07:28:24.345424 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-kube-api-access-bhg8f" (OuterVolumeSpecName: "kube-api-access-bhg8f") pod "7787ad08-661d-4148-bf6f-6c0f7b9d4b53" (UID: "7787ad08-661d-4148-bf6f-6c0f7b9d4b53"). InnerVolumeSpecName "kube-api-access-bhg8f". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:28:24 crc kubenswrapper[4708]: I0203 07:28:24.362694 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7787ad08-661d-4148-bf6f-6c0f7b9d4b53" (UID: "7787ad08-661d-4148-bf6f-6c0f7b9d4b53"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:28:24 crc kubenswrapper[4708]: I0203 07:28:24.362764 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "7787ad08-661d-4148-bf6f-6c0f7b9d4b53" (UID: "7787ad08-661d-4148-bf6f-6c0f7b9d4b53"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:28:24 crc kubenswrapper[4708]: I0203 07:28:24.366935 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7787ad08-661d-4148-bf6f-6c0f7b9d4b53" (UID: "7787ad08-661d-4148-bf6f-6c0f7b9d4b53"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:28:24 crc kubenswrapper[4708]: I0203 07:28:24.383307 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-config" (OuterVolumeSpecName: "config") pod "7787ad08-661d-4148-bf6f-6c0f7b9d4b53" (UID: "7787ad08-661d-4148-bf6f-6c0f7b9d4b53"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:28:24 crc kubenswrapper[4708]: I0203 07:28:24.386121 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 03 07:28:24 crc kubenswrapper[4708]: I0203 07:28:24.386296 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 03 07:28:24 crc kubenswrapper[4708]: I0203 07:28:24.404000 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 03 07:28:24 crc kubenswrapper[4708]: I0203 07:28:24.404865 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 03 07:28:24 crc kubenswrapper[4708]: I0203 07:28:24.425612 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 03 07:28:24 crc kubenswrapper[4708]: I0203 07:28:24.442623 4708 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:24 crc kubenswrapper[4708]: I0203 07:28:24.442649 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bhg8f\" (UniqueName: \"kubernetes.io/projected/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-kube-api-access-bhg8f\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:24 crc kubenswrapper[4708]: I0203 07:28:24.442661 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:24 crc kubenswrapper[4708]: I0203 07:28:24.442670 4708 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:24 crc kubenswrapper[4708]: I0203 07:28:24.442679 4708 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:24 crc kubenswrapper[4708]: I0203 07:28:24.454674 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 03 07:28:24 crc kubenswrapper[4708]: I0203 07:28:24.469338 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 03 07:28:24 crc kubenswrapper[4708]: I0203 07:28:24.481185 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 03 07:28:24 crc kubenswrapper[4708]: I0203 07:28:24.826189 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-fb89f7cf6-7s2lp" event={"ID":"ef60526c-751e-464d-a1c1-a50e343093b7","Type":"ContainerStarted","Data":"ce804deb4f437e893bdf37dfd4abb5c3b9ed636431ed5eb6463046f333a08aca"} Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:24.827561 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5dd9656794-5cgwc" event={"ID":"9492f917-fe6a-4076-a2f3-7d43ebee25e0","Type":"ContainerStarted","Data":"6d21445b62859abebcb8f004314d9758e6343e098ca69100bbcfb759e205be5d"} Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 
07:28:24.828487 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-mmcjs" event={"ID":"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34","Type":"ContainerStarted","Data":"354304992c1ed1d5027970f7492bd85ba24c3c488fb1bfa0c2be5b2cb276a35f"} Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:24.829287 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-db7889686-b8tst" event={"ID":"4e40bc4a-7f69-410e-b310-4cb12a8a7f58","Type":"ContainerStarted","Data":"90b25a2f903dade9ce56edac8b9674b6bb5c84e7de81f232f08333b848e289be"} Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:24.830708 4708 generic.go:334] "Generic (PLEG): container finished" podID="7787ad08-661d-4148-bf6f-6c0f7b9d4b53" containerID="a8641f016ac041db7a55fb2cc1470adc6e6c578c46abf100c869e75a78cfbb4f" exitCode=0 Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:24.831700 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-6cjgm" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:24.831746 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-6cjgm" event={"ID":"7787ad08-661d-4148-bf6f-6c0f7b9d4b53","Type":"ContainerDied","Data":"a8641f016ac041db7a55fb2cc1470adc6e6c578c46abf100c869e75a78cfbb4f"} Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:24.831779 4708 scope.go:117] "RemoveContainer" containerID="a8641f016ac041db7a55fb2cc1470adc6e6c578c46abf100c869e75a78cfbb4f" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:24.832097 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:24.832115 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-6cjgm" event={"ID":"7787ad08-661d-4148-bf6f-6c0f7b9d4b53","Type":"ContainerDied","Data":"0a8ade3a270e110110d6fe39dea1e071e54d994ee9490643b1db641d685e188e"} Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:24.832134 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:24.832376 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:24.832619 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:24.860509 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-fb89f7cf6-7s2lp" podStartSLOduration=3.860489109 podStartE2EDuration="3.860489109s" podCreationTimestamp="2026-02-03 07:28:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:28:24.850243167 +0000 UTC m=+1083.832189994" watchObservedRunningTime="2026-02-03 07:28:24.860489109 +0000 UTC m=+1083.842435936" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:24.879466 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-59974d4f4f-tvqgn"] Feb 03 07:28:25 crc kubenswrapper[4708]: E0203 07:28:24.879861 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7787ad08-661d-4148-bf6f-6c0f7b9d4b53" containerName="init" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:24.879875 4708 
state_mem.go:107] "Deleted CPUSet assignment" podUID="7787ad08-661d-4148-bf6f-6c0f7b9d4b53" containerName="init" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:24.880069 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="7787ad08-661d-4148-bf6f-6c0f7b9d4b53" containerName="init" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:24.880964 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-59974d4f4f-tvqgn" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:24.882983 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:24.883267 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:24.887466 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-59974d4f4f-tvqgn"] Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.055174 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tr89m\" (UniqueName: \"kubernetes.io/projected/49513617-6e38-4ae1-ae96-b74bf325d19a-kube-api-access-tr89m\") pod \"neutron-59974d4f4f-tvqgn\" (UID: \"49513617-6e38-4ae1-ae96-b74bf325d19a\") " pod="openstack/neutron-59974d4f4f-tvqgn" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.055234 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/49513617-6e38-4ae1-ae96-b74bf325d19a-config\") pod \"neutron-59974d4f4f-tvqgn\" (UID: \"49513617-6e38-4ae1-ae96-b74bf325d19a\") " pod="openstack/neutron-59974d4f4f-tvqgn" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.055261 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/49513617-6e38-4ae1-ae96-b74bf325d19a-httpd-config\") pod \"neutron-59974d4f4f-tvqgn\" (UID: \"49513617-6e38-4ae1-ae96-b74bf325d19a\") " pod="openstack/neutron-59974d4f4f-tvqgn" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.055292 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/49513617-6e38-4ae1-ae96-b74bf325d19a-ovndb-tls-certs\") pod \"neutron-59974d4f4f-tvqgn\" (UID: \"49513617-6e38-4ae1-ae96-b74bf325d19a\") " pod="openstack/neutron-59974d4f4f-tvqgn" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.055343 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/49513617-6e38-4ae1-ae96-b74bf325d19a-internal-tls-certs\") pod \"neutron-59974d4f4f-tvqgn\" (UID: \"49513617-6e38-4ae1-ae96-b74bf325d19a\") " pod="openstack/neutron-59974d4f4f-tvqgn" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.055375 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/49513617-6e38-4ae1-ae96-b74bf325d19a-public-tls-certs\") pod \"neutron-59974d4f4f-tvqgn\" (UID: \"49513617-6e38-4ae1-ae96-b74bf325d19a\") " pod="openstack/neutron-59974d4f4f-tvqgn" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.055391 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49513617-6e38-4ae1-ae96-b74bf325d19a-combined-ca-bundle\") pod \"neutron-59974d4f4f-tvqgn\" (UID: \"49513617-6e38-4ae1-ae96-b74bf325d19a\") " pod="openstack/neutron-59974d4f4f-tvqgn" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.149488 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7787ad08-661d-4148-bf6f-6c0f7b9d4b53" (UID: "7787ad08-661d-4148-bf6f-6c0f7b9d4b53"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.157010 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/49513617-6e38-4ae1-ae96-b74bf325d19a-internal-tls-certs\") pod \"neutron-59974d4f4f-tvqgn\" (UID: \"49513617-6e38-4ae1-ae96-b74bf325d19a\") " pod="openstack/neutron-59974d4f4f-tvqgn" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.157066 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/49513617-6e38-4ae1-ae96-b74bf325d19a-public-tls-certs\") pod \"neutron-59974d4f4f-tvqgn\" (UID: \"49513617-6e38-4ae1-ae96-b74bf325d19a\") " pod="openstack/neutron-59974d4f4f-tvqgn" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.157083 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49513617-6e38-4ae1-ae96-b74bf325d19a-combined-ca-bundle\") pod \"neutron-59974d4f4f-tvqgn\" (UID: \"49513617-6e38-4ae1-ae96-b74bf325d19a\") " pod="openstack/neutron-59974d4f4f-tvqgn" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.157136 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tr89m\" (UniqueName: \"kubernetes.io/projected/49513617-6e38-4ae1-ae96-b74bf325d19a-kube-api-access-tr89m\") pod \"neutron-59974d4f4f-tvqgn\" (UID: \"49513617-6e38-4ae1-ae96-b74bf325d19a\") " pod="openstack/neutron-59974d4f4f-tvqgn" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.157176 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/49513617-6e38-4ae1-ae96-b74bf325d19a-config\") pod \"neutron-59974d4f4f-tvqgn\" (UID: \"49513617-6e38-4ae1-ae96-b74bf325d19a\") " pod="openstack/neutron-59974d4f4f-tvqgn" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.157203 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/49513617-6e38-4ae1-ae96-b74bf325d19a-httpd-config\") pod \"neutron-59974d4f4f-tvqgn\" (UID: \"49513617-6e38-4ae1-ae96-b74bf325d19a\") " pod="openstack/neutron-59974d4f4f-tvqgn" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.157231 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/49513617-6e38-4ae1-ae96-b74bf325d19a-ovndb-tls-certs\") pod \"neutron-59974d4f4f-tvqgn\" (UID: \"49513617-6e38-4ae1-ae96-b74bf325d19a\") " pod="openstack/neutron-59974d4f4f-tvqgn" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.157293 4708 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7787ad08-661d-4148-bf6f-6c0f7b9d4b53-dns-svc\") on 
node \"crc\" DevicePath \"\"" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.164027 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49513617-6e38-4ae1-ae96-b74bf325d19a-combined-ca-bundle\") pod \"neutron-59974d4f4f-tvqgn\" (UID: \"49513617-6e38-4ae1-ae96-b74bf325d19a\") " pod="openstack/neutron-59974d4f4f-tvqgn" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.171878 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/49513617-6e38-4ae1-ae96-b74bf325d19a-public-tls-certs\") pod \"neutron-59974d4f4f-tvqgn\" (UID: \"49513617-6e38-4ae1-ae96-b74bf325d19a\") " pod="openstack/neutron-59974d4f4f-tvqgn" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.179189 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/49513617-6e38-4ae1-ae96-b74bf325d19a-ovndb-tls-certs\") pod \"neutron-59974d4f4f-tvqgn\" (UID: \"49513617-6e38-4ae1-ae96-b74bf325d19a\") " pod="openstack/neutron-59974d4f4f-tvqgn" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.185343 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/49513617-6e38-4ae1-ae96-b74bf325d19a-httpd-config\") pod \"neutron-59974d4f4f-tvqgn\" (UID: \"49513617-6e38-4ae1-ae96-b74bf325d19a\") " pod="openstack/neutron-59974d4f4f-tvqgn" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.185864 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/49513617-6e38-4ae1-ae96-b74bf325d19a-config\") pod \"neutron-59974d4f4f-tvqgn\" (UID: \"49513617-6e38-4ae1-ae96-b74bf325d19a\") " pod="openstack/neutron-59974d4f4f-tvqgn" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.195984 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tr89m\" (UniqueName: \"kubernetes.io/projected/49513617-6e38-4ae1-ae96-b74bf325d19a-kube-api-access-tr89m\") pod \"neutron-59974d4f4f-tvqgn\" (UID: \"49513617-6e38-4ae1-ae96-b74bf325d19a\") " pod="openstack/neutron-59974d4f4f-tvqgn" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.196649 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/49513617-6e38-4ae1-ae96-b74bf325d19a-internal-tls-certs\") pod \"neutron-59974d4f4f-tvqgn\" (UID: \"49513617-6e38-4ae1-ae96-b74bf325d19a\") " pod="openstack/neutron-59974d4f4f-tvqgn" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.336363 4708 scope.go:117] "RemoveContainer" containerID="a8641f016ac041db7a55fb2cc1470adc6e6c578c46abf100c869e75a78cfbb4f" Feb 03 07:28:25 crc kubenswrapper[4708]: E0203 07:28:25.344770 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8641f016ac041db7a55fb2cc1470adc6e6c578c46abf100c869e75a78cfbb4f\": container with ID starting with a8641f016ac041db7a55fb2cc1470adc6e6c578c46abf100c869e75a78cfbb4f not found: ID does not exist" containerID="a8641f016ac041db7a55fb2cc1470adc6e6c578c46abf100c869e75a78cfbb4f" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.344856 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8641f016ac041db7a55fb2cc1470adc6e6c578c46abf100c869e75a78cfbb4f"} err="failed to get container status 
\"a8641f016ac041db7a55fb2cc1470adc6e6c578c46abf100c869e75a78cfbb4f\": rpc error: code = NotFound desc = could not find container \"a8641f016ac041db7a55fb2cc1470adc6e6c578c46abf100c869e75a78cfbb4f\": container with ID starting with a8641f016ac041db7a55fb2cc1470adc6e6c578c46abf100c869e75a78cfbb4f not found: ID does not exist" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.399437 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-59974d4f4f-tvqgn" Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.570167 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-6cjgm"] Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.576114 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-6cjgm"] Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.853526 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5dd9656794-5cgwc" event={"ID":"9492f917-fe6a-4076-a2f3-7d43ebee25e0","Type":"ContainerStarted","Data":"42af5751558b8a73e0a6c6aab781465d1a6e689ae2d065a4641c8aef078c3d29"} Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.860126 4708 generic.go:334] "Generic (PLEG): container finished" podID="68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34" containerID="353916dd6c4b2f21a33049d16e0ce9a9fadfe67d91467b16f8be48b300620b90" exitCode=0 Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.860201 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-mmcjs" event={"ID":"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34","Type":"ContainerDied","Data":"353916dd6c4b2f21a33049d16e0ce9a9fadfe67d91467b16f8be48b300620b90"} Feb 03 07:28:25 crc kubenswrapper[4708]: I0203 07:28:25.869919 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-fb89f7cf6-7s2lp" Feb 03 07:28:26 crc kubenswrapper[4708]: W0203 07:28:26.100555 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod49513617_6e38_4ae1_ae96_b74bf325d19a.slice/crio-805336385c6b2299c9a1becf668d57bbca446706d899acf380f7f97172cc97bf WatchSource:0}: Error finding container 805336385c6b2299c9a1becf668d57bbca446706d899acf380f7f97172cc97bf: Status 404 returned error can't find the container with id 805336385c6b2299c9a1becf668d57bbca446706d899acf380f7f97172cc97bf Feb 03 07:28:26 crc kubenswrapper[4708]: I0203 07:28:26.114941 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7787ad08-661d-4148-bf6f-6c0f7b9d4b53" path="/var/lib/kubelet/pods/7787ad08-661d-4148-bf6f-6c0f7b9d4b53/volumes" Feb 03 07:28:26 crc kubenswrapper[4708]: I0203 07:28:26.115787 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-59974d4f4f-tvqgn"] Feb 03 07:28:26 crc kubenswrapper[4708]: I0203 07:28:26.872921 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-59974d4f4f-tvqgn" event={"ID":"49513617-6e38-4ae1-ae96-b74bf325d19a","Type":"ContainerStarted","Data":"66861d5e35fbbf677c2ed1a11a0ebc2fbec28e5fdff0b76221db7c4ca8cea02b"} Feb 03 07:28:26 crc kubenswrapper[4708]: I0203 07:28:26.873238 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-59974d4f4f-tvqgn" event={"ID":"49513617-6e38-4ae1-ae96-b74bf325d19a","Type":"ContainerStarted","Data":"805336385c6b2299c9a1becf668d57bbca446706d899acf380f7f97172cc97bf"} Feb 03 07:28:26 crc kubenswrapper[4708]: I0203 07:28:26.888596 4708 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5dd9656794-5cgwc" event={"ID":"9492f917-fe6a-4076-a2f3-7d43ebee25e0","Type":"ContainerStarted","Data":"fd0d88ae537cca31d39e625cb4b32d42cf4e73a387a668ea4137639e966bf489"} Feb 03 07:28:26 crc kubenswrapper[4708]: I0203 07:28:26.889019 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5dd9656794-5cgwc" Feb 03 07:28:26 crc kubenswrapper[4708]: I0203 07:28:26.889040 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5dd9656794-5cgwc" Feb 03 07:28:26 crc kubenswrapper[4708]: I0203 07:28:26.894832 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-mmcjs" event={"ID":"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34","Type":"ContainerStarted","Data":"6835d377390d09c23b80658fb15aeb1b1ea362aac4def772f32f47bbed819553"} Feb 03 07:28:26 crc kubenswrapper[4708]: I0203 07:28:26.894895 4708 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 03 07:28:26 crc kubenswrapper[4708]: I0203 07:28:26.895065 4708 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 03 07:28:26 crc kubenswrapper[4708]: I0203 07:28:26.909431 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5dd9656794-5cgwc" podStartSLOduration=4.909411534 podStartE2EDuration="4.909411534s" podCreationTimestamp="2026-02-03 07:28:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:28:26.905211421 +0000 UTC m=+1085.887158228" watchObservedRunningTime="2026-02-03 07:28:26.909411534 +0000 UTC m=+1085.891358351" Feb 03 07:28:26 crc kubenswrapper[4708]: I0203 07:28:26.959549 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-85ff748b95-mmcjs" podStartSLOduration=4.95952595 podStartE2EDuration="4.95952595s" podCreationTimestamp="2026-02-03 07:28:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:28:26.949303531 +0000 UTC m=+1085.931250338" watchObservedRunningTime="2026-02-03 07:28:26.95952595 +0000 UTC m=+1085.941472767" Feb 03 07:28:27 crc kubenswrapper[4708]: I0203 07:28:27.130776 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 03 07:28:27 crc kubenswrapper[4708]: I0203 07:28:27.134202 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 03 07:28:27 crc kubenswrapper[4708]: I0203 07:28:27.863558 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-85ff748b95-mmcjs" Feb 03 07:28:28 crc kubenswrapper[4708]: I0203 07:28:28.017202 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 03 07:28:28 crc kubenswrapper[4708]: I0203 07:28:28.485532 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6f676fd47d-s9mvl"] Feb 03 07:28:28 crc kubenswrapper[4708]: I0203 07:28:28.487431 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6f676fd47d-s9mvl" Feb 03 07:28:28 crc kubenswrapper[4708]: I0203 07:28:28.491553 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Feb 03 07:28:28 crc kubenswrapper[4708]: I0203 07:28:28.492017 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Feb 03 07:28:28 crc kubenswrapper[4708]: I0203 07:28:28.499830 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6f676fd47d-s9mvl"] Feb 03 07:28:28 crc kubenswrapper[4708]: I0203 07:28:28.637771 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e-config-data-custom\") pod \"barbican-api-6f676fd47d-s9mvl\" (UID: \"7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e\") " pod="openstack/barbican-api-6f676fd47d-s9mvl" Feb 03 07:28:28 crc kubenswrapper[4708]: I0203 07:28:28.638023 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e-logs\") pod \"barbican-api-6f676fd47d-s9mvl\" (UID: \"7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e\") " pod="openstack/barbican-api-6f676fd47d-s9mvl" Feb 03 07:28:28 crc kubenswrapper[4708]: I0203 07:28:28.638079 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e-config-data\") pod \"barbican-api-6f676fd47d-s9mvl\" (UID: \"7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e\") " pod="openstack/barbican-api-6f676fd47d-s9mvl" Feb 03 07:28:28 crc kubenswrapper[4708]: I0203 07:28:28.638171 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e-internal-tls-certs\") pod \"barbican-api-6f676fd47d-s9mvl\" (UID: \"7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e\") " pod="openstack/barbican-api-6f676fd47d-s9mvl" Feb 03 07:28:28 crc kubenswrapper[4708]: I0203 07:28:28.638239 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e-combined-ca-bundle\") pod \"barbican-api-6f676fd47d-s9mvl\" (UID: \"7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e\") " pod="openstack/barbican-api-6f676fd47d-s9mvl" Feb 03 07:28:28 crc kubenswrapper[4708]: I0203 07:28:28.638302 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e-public-tls-certs\") pod \"barbican-api-6f676fd47d-s9mvl\" (UID: \"7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e\") " pod="openstack/barbican-api-6f676fd47d-s9mvl" Feb 03 07:28:28 crc kubenswrapper[4708]: I0203 07:28:28.638357 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82wrx\" (UniqueName: \"kubernetes.io/projected/7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e-kube-api-access-82wrx\") pod \"barbican-api-6f676fd47d-s9mvl\" (UID: \"7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e\") " pod="openstack/barbican-api-6f676fd47d-s9mvl" Feb 03 07:28:28 crc kubenswrapper[4708]: I0203 07:28:28.740230 4708 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e-config-data-custom\") pod \"barbican-api-6f676fd47d-s9mvl\" (UID: \"7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e\") " pod="openstack/barbican-api-6f676fd47d-s9mvl" Feb 03 07:28:28 crc kubenswrapper[4708]: I0203 07:28:28.740537 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e-logs\") pod \"barbican-api-6f676fd47d-s9mvl\" (UID: \"7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e\") " pod="openstack/barbican-api-6f676fd47d-s9mvl" Feb 03 07:28:28 crc kubenswrapper[4708]: I0203 07:28:28.740616 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e-config-data\") pod \"barbican-api-6f676fd47d-s9mvl\" (UID: \"7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e\") " pod="openstack/barbican-api-6f676fd47d-s9mvl" Feb 03 07:28:28 crc kubenswrapper[4708]: I0203 07:28:28.740698 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e-internal-tls-certs\") pod \"barbican-api-6f676fd47d-s9mvl\" (UID: \"7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e\") " pod="openstack/barbican-api-6f676fd47d-s9mvl" Feb 03 07:28:28 crc kubenswrapper[4708]: I0203 07:28:28.740814 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e-combined-ca-bundle\") pod \"barbican-api-6f676fd47d-s9mvl\" (UID: \"7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e\") " pod="openstack/barbican-api-6f676fd47d-s9mvl" Feb 03 07:28:28 crc kubenswrapper[4708]: I0203 07:28:28.740919 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e-public-tls-certs\") pod \"barbican-api-6f676fd47d-s9mvl\" (UID: \"7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e\") " pod="openstack/barbican-api-6f676fd47d-s9mvl" Feb 03 07:28:28 crc kubenswrapper[4708]: I0203 07:28:28.740998 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e-logs\") pod \"barbican-api-6f676fd47d-s9mvl\" (UID: \"7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e\") " pod="openstack/barbican-api-6f676fd47d-s9mvl" Feb 03 07:28:28 crc kubenswrapper[4708]: I0203 07:28:28.741150 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82wrx\" (UniqueName: \"kubernetes.io/projected/7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e-kube-api-access-82wrx\") pod \"barbican-api-6f676fd47d-s9mvl\" (UID: \"7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e\") " pod="openstack/barbican-api-6f676fd47d-s9mvl" Feb 03 07:28:28 crc kubenswrapper[4708]: I0203 07:28:28.746306 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e-combined-ca-bundle\") pod \"barbican-api-6f676fd47d-s9mvl\" (UID: \"7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e\") " pod="openstack/barbican-api-6f676fd47d-s9mvl" Feb 03 07:28:28 crc kubenswrapper[4708]: I0203 07:28:28.747520 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e-internal-tls-certs\") pod \"barbican-api-6f676fd47d-s9mvl\" (UID: \"7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e\") " pod="openstack/barbican-api-6f676fd47d-s9mvl" Feb 03 07:28:28 crc kubenswrapper[4708]: I0203 07:28:28.747589 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e-public-tls-certs\") pod \"barbican-api-6f676fd47d-s9mvl\" (UID: \"7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e\") " pod="openstack/barbican-api-6f676fd47d-s9mvl" Feb 03 07:28:28 crc kubenswrapper[4708]: I0203 07:28:28.747718 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e-config-data-custom\") pod \"barbican-api-6f676fd47d-s9mvl\" (UID: \"7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e\") " pod="openstack/barbican-api-6f676fd47d-s9mvl" Feb 03 07:28:28 crc kubenswrapper[4708]: I0203 07:28:28.748199 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e-config-data\") pod \"barbican-api-6f676fd47d-s9mvl\" (UID: \"7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e\") " pod="openstack/barbican-api-6f676fd47d-s9mvl" Feb 03 07:28:28 crc kubenswrapper[4708]: I0203 07:28:28.771767 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82wrx\" (UniqueName: \"kubernetes.io/projected/7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e-kube-api-access-82wrx\") pod \"barbican-api-6f676fd47d-s9mvl\" (UID: \"7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e\") " pod="openstack/barbican-api-6f676fd47d-s9mvl" Feb 03 07:28:28 crc kubenswrapper[4708]: I0203 07:28:28.803096 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6f676fd47d-s9mvl" Feb 03 07:28:28 crc kubenswrapper[4708]: I0203 07:28:28.927988 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-59974d4f4f-tvqgn" event={"ID":"49513617-6e38-4ae1-ae96-b74bf325d19a","Type":"ContainerStarted","Data":"0ebf0f0e7a565c2f680f998264fd25d40e0649030789c31f3c50b29aa9e05d27"} Feb 03 07:28:28 crc kubenswrapper[4708]: I0203 07:28:28.928370 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-59974d4f4f-tvqgn" Feb 03 07:28:28 crc kubenswrapper[4708]: I0203 07:28:28.952420 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-59974d4f4f-tvqgn" podStartSLOduration=4.952401333 podStartE2EDuration="4.952401333s" podCreationTimestamp="2026-02-03 07:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:28:28.947527495 +0000 UTC m=+1087.929474302" watchObservedRunningTime="2026-02-03 07:28:28.952401333 +0000 UTC m=+1087.934348140" Feb 03 07:28:29 crc kubenswrapper[4708]: I0203 07:28:29.042322 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 03 07:28:32 crc kubenswrapper[4708]: I0203 07:28:32.863936 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-85ff748b95-mmcjs" Feb 03 07:28:32 crc kubenswrapper[4708]: I0203 07:28:32.936277 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-hwghb"] Feb 03 07:28:32 crc kubenswrapper[4708]: I0203 07:28:32.936760 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb" podUID="2739cdfe-549c-495a-ac6c-7f6cb96de9a4" containerName="dnsmasq-dns" containerID="cri-o://c63524d993b523334425c38ab56f0748609e5877d97af5e7d7f6749f20d506ce" gracePeriod=10 Feb 03 07:28:34 crc kubenswrapper[4708]: I0203 07:28:34.691489 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb" podUID="2739cdfe-549c-495a-ac6c-7f6cb96de9a4" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.147:5353: connect: connection refused" Feb 03 07:28:35 crc kubenswrapper[4708]: I0203 07:28:35.831708 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5dd9656794-5cgwc" podUID="9492f917-fe6a-4076-a2f3-7d43ebee25e0" containerName="barbican-api" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 03 07:28:35 crc kubenswrapper[4708]: I0203 07:28:35.840329 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-5dd9656794-5cgwc" podUID="9492f917-fe6a-4076-a2f3-7d43ebee25e0" containerName="barbican-api" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 03 07:28:35 crc kubenswrapper[4708]: I0203 07:28:35.847263 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5dd9656794-5cgwc" podUID="9492f917-fe6a-4076-a2f3-7d43ebee25e0" containerName="barbican-api-log" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 03 07:28:35 crc kubenswrapper[4708]: I0203 07:28:35.995171 4708 generic.go:334] "Generic (PLEG): container finished" podID="2739cdfe-549c-495a-ac6c-7f6cb96de9a4" containerID="c63524d993b523334425c38ab56f0748609e5877d97af5e7d7f6749f20d506ce" exitCode=0 Feb 03 07:28:35 crc 
kubenswrapper[4708]: I0203 07:28:35.995238 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb" event={"ID":"2739cdfe-549c-495a-ac6c-7f6cb96de9a4","Type":"ContainerDied","Data":"c63524d993b523334425c38ab56f0748609e5877d97af5e7d7f6749f20d506ce"} Feb 03 07:28:37 crc kubenswrapper[4708]: I0203 07:28:37.647754 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5dd9656794-5cgwc" Feb 03 07:28:37 crc kubenswrapper[4708]: I0203 07:28:37.841986 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb" Feb 03 07:28:37 crc kubenswrapper[4708]: I0203 07:28:37.968326 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-config\") pod \"2739cdfe-549c-495a-ac6c-7f6cb96de9a4\" (UID: \"2739cdfe-549c-495a-ac6c-7f6cb96de9a4\") " Feb 03 07:28:37 crc kubenswrapper[4708]: I0203 07:28:37.968409 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hpjx5\" (UniqueName: \"kubernetes.io/projected/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-kube-api-access-hpjx5\") pod \"2739cdfe-549c-495a-ac6c-7f6cb96de9a4\" (UID: \"2739cdfe-549c-495a-ac6c-7f6cb96de9a4\") " Feb 03 07:28:37 crc kubenswrapper[4708]: I0203 07:28:37.968461 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-dns-svc\") pod \"2739cdfe-549c-495a-ac6c-7f6cb96de9a4\" (UID: \"2739cdfe-549c-495a-ac6c-7f6cb96de9a4\") " Feb 03 07:28:37 crc kubenswrapper[4708]: I0203 07:28:37.968542 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-dns-swift-storage-0\") pod \"2739cdfe-549c-495a-ac6c-7f6cb96de9a4\" (UID: \"2739cdfe-549c-495a-ac6c-7f6cb96de9a4\") " Feb 03 07:28:37 crc kubenswrapper[4708]: I0203 07:28:37.968568 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-ovsdbserver-sb\") pod \"2739cdfe-549c-495a-ac6c-7f6cb96de9a4\" (UID: \"2739cdfe-549c-495a-ac6c-7f6cb96de9a4\") " Feb 03 07:28:37 crc kubenswrapper[4708]: I0203 07:28:37.968610 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-ovsdbserver-nb\") pod \"2739cdfe-549c-495a-ac6c-7f6cb96de9a4\" (UID: \"2739cdfe-549c-495a-ac6c-7f6cb96de9a4\") " Feb 03 07:28:37 crc kubenswrapper[4708]: I0203 07:28:37.984027 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-kube-api-access-hpjx5" (OuterVolumeSpecName: "kube-api-access-hpjx5") pod "2739cdfe-549c-495a-ac6c-7f6cb96de9a4" (UID: "2739cdfe-549c-495a-ac6c-7f6cb96de9a4"). InnerVolumeSpecName "kube-api-access-hpjx5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:28:38 crc kubenswrapper[4708]: I0203 07:28:38.026380 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb" event={"ID":"2739cdfe-549c-495a-ac6c-7f6cb96de9a4","Type":"ContainerDied","Data":"2aacd1901cd490917c4da322b580a2cb5095ab8bab297131961634962103ea30"} Feb 03 07:28:38 crc kubenswrapper[4708]: I0203 07:28:38.026441 4708 scope.go:117] "RemoveContainer" containerID="c63524d993b523334425c38ab56f0748609e5877d97af5e7d7f6749f20d506ce" Feb 03 07:28:38 crc kubenswrapper[4708]: I0203 07:28:38.026479 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-hwghb" Feb 03 07:28:38 crc kubenswrapper[4708]: I0203 07:28:38.057629 4708 scope.go:117] "RemoveContainer" containerID="7b647a3e19efa8da5c3bb396f17dae62c898c85c11d027d948fd32a5c43995e9" Feb 03 07:28:38 crc kubenswrapper[4708]: I0203 07:28:38.071573 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hpjx5\" (UniqueName: \"kubernetes.io/projected/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-kube-api-access-hpjx5\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:38 crc kubenswrapper[4708]: I0203 07:28:38.074105 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2739cdfe-549c-495a-ac6c-7f6cb96de9a4" (UID: "2739cdfe-549c-495a-ac6c-7f6cb96de9a4"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:28:38 crc kubenswrapper[4708]: I0203 07:28:38.074921 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2739cdfe-549c-495a-ac6c-7f6cb96de9a4" (UID: "2739cdfe-549c-495a-ac6c-7f6cb96de9a4"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:28:38 crc kubenswrapper[4708]: I0203 07:28:38.082048 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2739cdfe-549c-495a-ac6c-7f6cb96de9a4" (UID: "2739cdfe-549c-495a-ac6c-7f6cb96de9a4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:28:38 crc kubenswrapper[4708]: I0203 07:28:38.082391 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2739cdfe-549c-495a-ac6c-7f6cb96de9a4" (UID: "2739cdfe-549c-495a-ac6c-7f6cb96de9a4"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:28:38 crc kubenswrapper[4708]: I0203 07:28:38.106137 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-config" (OuterVolumeSpecName: "config") pod "2739cdfe-549c-495a-ac6c-7f6cb96de9a4" (UID: "2739cdfe-549c-495a-ac6c-7f6cb96de9a4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:28:38 crc kubenswrapper[4708]: I0203 07:28:38.130500 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6f676fd47d-s9mvl"] Feb 03 07:28:38 crc kubenswrapper[4708]: W0203 07:28:38.146788 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7e8eb0a1_9803_4750_9f2e_5cdb5b86a41e.slice/crio-3e014582b5de37d92f26e10ddd05e7243d5d6224efef7f7c25a07f0501aed4c1 WatchSource:0}: Error finding container 3e014582b5de37d92f26e10ddd05e7243d5d6224efef7f7c25a07f0501aed4c1: Status 404 returned error can't find the container with id 3e014582b5de37d92f26e10ddd05e7243d5d6224efef7f7c25a07f0501aed4c1 Feb 03 07:28:38 crc kubenswrapper[4708]: I0203 07:28:38.173739 4708 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:38 crc kubenswrapper[4708]: I0203 07:28:38.173773 4708 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:38 crc kubenswrapper[4708]: I0203 07:28:38.173785 4708 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:38 crc kubenswrapper[4708]: I0203 07:28:38.173819 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:38 crc kubenswrapper[4708]: I0203 07:28:38.173833 4708 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2739cdfe-549c-495a-ac6c-7f6cb96de9a4-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:38 crc kubenswrapper[4708]: I0203 07:28:38.353875 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-hwghb"] Feb 03 07:28:38 crc kubenswrapper[4708]: I0203 07:28:38.363289 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-hwghb"] Feb 03 07:28:39 crc kubenswrapper[4708]: I0203 07:28:39.036715 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-db7889686-b8tst" event={"ID":"4e40bc4a-7f69-410e-b310-4cb12a8a7f58","Type":"ContainerStarted","Data":"153f76d39f9ca9a0186064594e71b83a6978bdfae9881c6eb6f27352860e8b1a"} Feb 03 07:28:39 crc kubenswrapper[4708]: I0203 07:28:39.037081 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-db7889686-b8tst" event={"ID":"4e40bc4a-7f69-410e-b310-4cb12a8a7f58","Type":"ContainerStarted","Data":"b67d2ebb09ceba48093a52da9eb8e91c7a0fd24805a150f7af05a2a21929b35c"} Feb 03 07:28:39 crc kubenswrapper[4708]: I0203 07:28:39.038706 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6f676fd47d-s9mvl" event={"ID":"7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e","Type":"ContainerStarted","Data":"c125b5ed2c267ac3a58baa4333f6079d6c6d0eaa7193a54bc9504b49719bd866"} Feb 03 07:28:39 crc kubenswrapper[4708]: I0203 07:28:39.038750 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6f676fd47d-s9mvl" 
event={"ID":"7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e","Type":"ContainerStarted","Data":"d02d9766752ce79100881d174df94dc91964b77930a9bad6b3a7aeee865ef10a"} Feb 03 07:28:39 crc kubenswrapper[4708]: I0203 07:28:39.038760 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6f676fd47d-s9mvl" event={"ID":"7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e","Type":"ContainerStarted","Data":"3e014582b5de37d92f26e10ddd05e7243d5d6224efef7f7c25a07f0501aed4c1"} Feb 03 07:28:39 crc kubenswrapper[4708]: I0203 07:28:39.038821 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6f676fd47d-s9mvl" Feb 03 07:28:39 crc kubenswrapper[4708]: I0203 07:28:39.038880 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6f676fd47d-s9mvl" Feb 03 07:28:39 crc kubenswrapper[4708]: I0203 07:28:39.040612 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6cdbf88dd5-z8pqs" event={"ID":"00f8a942-b096-49d1-b020-c1aa13eb42c4","Type":"ContainerStarted","Data":"054ad43a600c878ddca410dc6a741eda5fe342281ddc5a7c5a077a481418ca31"} Feb 03 07:28:39 crc kubenswrapper[4708]: I0203 07:28:39.040762 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6cdbf88dd5-z8pqs" event={"ID":"00f8a942-b096-49d1-b020-c1aa13eb42c4","Type":"ContainerStarted","Data":"baefbf49d54315d73239e41341a526856baa212fba399d8147518033f62310e2"} Feb 03 07:28:39 crc kubenswrapper[4708]: I0203 07:28:39.046187 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"46305c26-487f-4187-a8ae-48fd2319d25a","Type":"ContainerStarted","Data":"8418b11a0b52c4d367b2fe89a33fe20096e713a906b9ab11b43f0195cc97d3b5"} Feb 03 07:28:39 crc kubenswrapper[4708]: I0203 07:28:39.046504 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="46305c26-487f-4187-a8ae-48fd2319d25a" containerName="ceilometer-central-agent" containerID="cri-o://cc9c1d8b39a8e19ddeda841a7167cce7198b34f64efd8c521320ce2762ac0382" gracePeriod=30 Feb 03 07:28:39 crc kubenswrapper[4708]: I0203 07:28:39.046635 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="46305c26-487f-4187-a8ae-48fd2319d25a" containerName="proxy-httpd" containerID="cri-o://8418b11a0b52c4d367b2fe89a33fe20096e713a906b9ab11b43f0195cc97d3b5" gracePeriod=30 Feb 03 07:28:39 crc kubenswrapper[4708]: I0203 07:28:39.046698 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="46305c26-487f-4187-a8ae-48fd2319d25a" containerName="sg-core" containerID="cri-o://c6fe7d6c4788d0785776e8ad0c91a07b6a3d25835e512034d57e333cdd2d7fa1" gracePeriod=30 Feb 03 07:28:39 crc kubenswrapper[4708]: I0203 07:28:39.046729 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="46305c26-487f-4187-a8ae-48fd2319d25a" containerName="ceilometer-notification-agent" containerID="cri-o://7ba1c6e5b06e4d3086cb7edbcc917f45894325a6286b31509c170c2ab9bb888a" gracePeriod=30 Feb 03 07:28:39 crc kubenswrapper[4708]: I0203 07:28:39.046877 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 03 07:28:39 crc kubenswrapper[4708]: I0203 07:28:39.064425 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-db7889686-b8tst" podStartSLOduration=4.227315377 
podStartE2EDuration="18.064398523s" podCreationTimestamp="2026-02-03 07:28:21 +0000 UTC" firstStartedPulling="2026-02-03 07:28:23.781870195 +0000 UTC m=+1082.763817002" lastFinishedPulling="2026-02-03 07:28:37.618953341 +0000 UTC m=+1096.600900148" observedRunningTime="2026-02-03 07:28:39.05567401 +0000 UTC m=+1098.037620837" watchObservedRunningTime="2026-02-03 07:28:39.064398523 +0000 UTC m=+1098.046345330" Feb 03 07:28:39 crc kubenswrapper[4708]: I0203 07:28:39.081972 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6f676fd47d-s9mvl" podStartSLOduration=11.081951124 podStartE2EDuration="11.081951124s" podCreationTimestamp="2026-02-03 07:28:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:28:39.081309198 +0000 UTC m=+1098.063256005" watchObservedRunningTime="2026-02-03 07:28:39.081951124 +0000 UTC m=+1098.063897931" Feb 03 07:28:39 crc kubenswrapper[4708]: I0203 07:28:39.088069 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5dd9656794-5cgwc" Feb 03 07:28:39 crc kubenswrapper[4708]: I0203 07:28:39.136364 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.404929627 podStartE2EDuration="1m2.136343635s" podCreationTimestamp="2026-02-03 07:27:37 +0000 UTC" firstStartedPulling="2026-02-03 07:27:38.889192303 +0000 UTC m=+1037.871139130" lastFinishedPulling="2026-02-03 07:28:37.620606331 +0000 UTC m=+1096.602553138" observedRunningTime="2026-02-03 07:28:39.113444204 +0000 UTC m=+1098.095391031" watchObservedRunningTime="2026-02-03 07:28:39.136343635 +0000 UTC m=+1098.118290432" Feb 03 07:28:39 crc kubenswrapper[4708]: I0203 07:28:39.138843 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-6cdbf88dd5-z8pqs" podStartSLOduration=4.2931070479999995 podStartE2EDuration="18.138832076s" podCreationTimestamp="2026-02-03 07:28:21 +0000 UTC" firstStartedPulling="2026-02-03 07:28:23.712749603 +0000 UTC m=+1082.694696410" lastFinishedPulling="2026-02-03 07:28:37.558474631 +0000 UTC m=+1096.540421438" observedRunningTime="2026-02-03 07:28:39.131498246 +0000 UTC m=+1098.113445063" watchObservedRunningTime="2026-02-03 07:28:39.138832076 +0000 UTC m=+1098.120778873" Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.059563 4708 generic.go:334] "Generic (PLEG): container finished" podID="fde8edd5-50e0-4bb0-8701-54e0998444a1" containerID="da8fa13fa14d8037f60141061494219bc255d8cbc09e1c7236ac2f6534d3b1d2" exitCode=0 Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.059743 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-w75bv" event={"ID":"fde8edd5-50e0-4bb0-8701-54e0998444a1","Type":"ContainerDied","Data":"da8fa13fa14d8037f60141061494219bc255d8cbc09e1c7236ac2f6534d3b1d2"} Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.066851 4708 generic.go:334] "Generic (PLEG): container finished" podID="46305c26-487f-4187-a8ae-48fd2319d25a" containerID="8418b11a0b52c4d367b2fe89a33fe20096e713a906b9ab11b43f0195cc97d3b5" exitCode=0 Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.066877 4708 generic.go:334] "Generic (PLEG): container finished" podID="46305c26-487f-4187-a8ae-48fd2319d25a" containerID="c6fe7d6c4788d0785776e8ad0c91a07b6a3d25835e512034d57e333cdd2d7fa1" exitCode=2 Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.066884 
4708 generic.go:334] "Generic (PLEG): container finished" podID="46305c26-487f-4187-a8ae-48fd2319d25a" containerID="7ba1c6e5b06e4d3086cb7edbcc917f45894325a6286b31509c170c2ab9bb888a" exitCode=0 Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.066890 4708 generic.go:334] "Generic (PLEG): container finished" podID="46305c26-487f-4187-a8ae-48fd2319d25a" containerID="cc9c1d8b39a8e19ddeda841a7167cce7198b34f64efd8c521320ce2762ac0382" exitCode=0 Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.066998 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"46305c26-487f-4187-a8ae-48fd2319d25a","Type":"ContainerDied","Data":"8418b11a0b52c4d367b2fe89a33fe20096e713a906b9ab11b43f0195cc97d3b5"} Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.067036 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"46305c26-487f-4187-a8ae-48fd2319d25a","Type":"ContainerDied","Data":"c6fe7d6c4788d0785776e8ad0c91a07b6a3d25835e512034d57e333cdd2d7fa1"} Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.067050 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"46305c26-487f-4187-a8ae-48fd2319d25a","Type":"ContainerDied","Data":"7ba1c6e5b06e4d3086cb7edbcc917f45894325a6286b31509c170c2ab9bb888a"} Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.067063 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"46305c26-487f-4187-a8ae-48fd2319d25a","Type":"ContainerDied","Data":"cc9c1d8b39a8e19ddeda841a7167cce7198b34f64efd8c521320ce2762ac0382"} Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.067075 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"46305c26-487f-4187-a8ae-48fd2319d25a","Type":"ContainerDied","Data":"12cce954c343dca2cd354edb3d7ac900ee66c854d07cc95ee45749ed5bc05f77"} Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.067087 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="12cce954c343dca2cd354edb3d7ac900ee66c854d07cc95ee45749ed5bc05f77" Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.103429 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2739cdfe-549c-495a-ac6c-7f6cb96de9a4" path="/var/lib/kubelet/pods/2739cdfe-549c-495a-ac6c-7f6cb96de9a4/volumes" Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.119629 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.310773 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/46305c26-487f-4187-a8ae-48fd2319d25a-sg-core-conf-yaml\") pod \"46305c26-487f-4187-a8ae-48fd2319d25a\" (UID: \"46305c26-487f-4187-a8ae-48fd2319d25a\") " Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.310850 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46305c26-487f-4187-a8ae-48fd2319d25a-scripts\") pod \"46305c26-487f-4187-a8ae-48fd2319d25a\" (UID: \"46305c26-487f-4187-a8ae-48fd2319d25a\") " Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.310912 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ws9bf\" (UniqueName: \"kubernetes.io/projected/46305c26-487f-4187-a8ae-48fd2319d25a-kube-api-access-ws9bf\") pod \"46305c26-487f-4187-a8ae-48fd2319d25a\" (UID: \"46305c26-487f-4187-a8ae-48fd2319d25a\") " Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.311004 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46305c26-487f-4187-a8ae-48fd2319d25a-log-httpd\") pod \"46305c26-487f-4187-a8ae-48fd2319d25a\" (UID: \"46305c26-487f-4187-a8ae-48fd2319d25a\") " Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.311046 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46305c26-487f-4187-a8ae-48fd2319d25a-combined-ca-bundle\") pod \"46305c26-487f-4187-a8ae-48fd2319d25a\" (UID: \"46305c26-487f-4187-a8ae-48fd2319d25a\") " Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.311100 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46305c26-487f-4187-a8ae-48fd2319d25a-run-httpd\") pod \"46305c26-487f-4187-a8ae-48fd2319d25a\" (UID: \"46305c26-487f-4187-a8ae-48fd2319d25a\") " Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.311164 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46305c26-487f-4187-a8ae-48fd2319d25a-config-data\") pod \"46305c26-487f-4187-a8ae-48fd2319d25a\" (UID: \"46305c26-487f-4187-a8ae-48fd2319d25a\") " Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.312709 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/46305c26-487f-4187-a8ae-48fd2319d25a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "46305c26-487f-4187-a8ae-48fd2319d25a" (UID: "46305c26-487f-4187-a8ae-48fd2319d25a"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.313786 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/46305c26-487f-4187-a8ae-48fd2319d25a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "46305c26-487f-4187-a8ae-48fd2319d25a" (UID: "46305c26-487f-4187-a8ae-48fd2319d25a"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.318290 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46305c26-487f-4187-a8ae-48fd2319d25a-scripts" (OuterVolumeSpecName: "scripts") pod "46305c26-487f-4187-a8ae-48fd2319d25a" (UID: "46305c26-487f-4187-a8ae-48fd2319d25a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.319333 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46305c26-487f-4187-a8ae-48fd2319d25a-kube-api-access-ws9bf" (OuterVolumeSpecName: "kube-api-access-ws9bf") pod "46305c26-487f-4187-a8ae-48fd2319d25a" (UID: "46305c26-487f-4187-a8ae-48fd2319d25a"). InnerVolumeSpecName "kube-api-access-ws9bf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.337566 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46305c26-487f-4187-a8ae-48fd2319d25a-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "46305c26-487f-4187-a8ae-48fd2319d25a" (UID: "46305c26-487f-4187-a8ae-48fd2319d25a"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.379207 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46305c26-487f-4187-a8ae-48fd2319d25a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "46305c26-487f-4187-a8ae-48fd2319d25a" (UID: "46305c26-487f-4187-a8ae-48fd2319d25a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.402115 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46305c26-487f-4187-a8ae-48fd2319d25a-config-data" (OuterVolumeSpecName: "config-data") pod "46305c26-487f-4187-a8ae-48fd2319d25a" (UID: "46305c26-487f-4187-a8ae-48fd2319d25a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.413454 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46305c26-487f-4187-a8ae-48fd2319d25a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.413505 4708 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46305c26-487f-4187-a8ae-48fd2319d25a-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.413525 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46305c26-487f-4187-a8ae-48fd2319d25a-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.413545 4708 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/46305c26-487f-4187-a8ae-48fd2319d25a-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.413563 4708 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46305c26-487f-4187-a8ae-48fd2319d25a-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.413581 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ws9bf\" (UniqueName: \"kubernetes.io/projected/46305c26-487f-4187-a8ae-48fd2319d25a-kube-api-access-ws9bf\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:40 crc kubenswrapper[4708]: I0203 07:28:40.413599 4708 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/46305c26-487f-4187-a8ae-48fd2319d25a-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.106884 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.160395 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.169691 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.189248 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:28:41 crc kubenswrapper[4708]: E0203 07:28:41.189571 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46305c26-487f-4187-a8ae-48fd2319d25a" containerName="proxy-httpd" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.189591 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="46305c26-487f-4187-a8ae-48fd2319d25a" containerName="proxy-httpd" Feb 03 07:28:41 crc kubenswrapper[4708]: E0203 07:28:41.189604 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46305c26-487f-4187-a8ae-48fd2319d25a" containerName="ceilometer-notification-agent" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.189610 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="46305c26-487f-4187-a8ae-48fd2319d25a" containerName="ceilometer-notification-agent" Feb 03 07:28:41 crc kubenswrapper[4708]: E0203 07:28:41.189621 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46305c26-487f-4187-a8ae-48fd2319d25a" containerName="sg-core" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.189627 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="46305c26-487f-4187-a8ae-48fd2319d25a" containerName="sg-core" Feb 03 07:28:41 crc kubenswrapper[4708]: E0203 07:28:41.189639 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46305c26-487f-4187-a8ae-48fd2319d25a" containerName="ceilometer-central-agent" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.189646 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="46305c26-487f-4187-a8ae-48fd2319d25a" containerName="ceilometer-central-agent" Feb 03 07:28:41 crc kubenswrapper[4708]: E0203 07:28:41.189664 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2739cdfe-549c-495a-ac6c-7f6cb96de9a4" containerName="dnsmasq-dns" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.189670 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="2739cdfe-549c-495a-ac6c-7f6cb96de9a4" containerName="dnsmasq-dns" Feb 03 07:28:41 crc kubenswrapper[4708]: E0203 07:28:41.189694 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2739cdfe-549c-495a-ac6c-7f6cb96de9a4" containerName="init" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.189701 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="2739cdfe-549c-495a-ac6c-7f6cb96de9a4" containerName="init" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.189886 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="2739cdfe-549c-495a-ac6c-7f6cb96de9a4" containerName="dnsmasq-dns" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.189909 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="46305c26-487f-4187-a8ae-48fd2319d25a" containerName="ceilometer-central-agent" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.189924 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="46305c26-487f-4187-a8ae-48fd2319d25a" containerName="ceilometer-notification-agent" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 
07:28:41.189936 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="46305c26-487f-4187-a8ae-48fd2319d25a" containerName="sg-core" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.189944 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="46305c26-487f-4187-a8ae-48fd2319d25a" containerName="proxy-httpd" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.191938 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.195408 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.200404 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.205057 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.331328 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3aa4c5d7-28f2-4fa2-9430-4865754b335e-config-data\") pod \"ceilometer-0\" (UID: \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\") " pod="openstack/ceilometer-0" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.331380 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3aa4c5d7-28f2-4fa2-9430-4865754b335e-scripts\") pod \"ceilometer-0\" (UID: \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\") " pod="openstack/ceilometer-0" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.331420 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pckj\" (UniqueName: \"kubernetes.io/projected/3aa4c5d7-28f2-4fa2-9430-4865754b335e-kube-api-access-5pckj\") pod \"ceilometer-0\" (UID: \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\") " pod="openstack/ceilometer-0" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.331464 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3aa4c5d7-28f2-4fa2-9430-4865754b335e-run-httpd\") pod \"ceilometer-0\" (UID: \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\") " pod="openstack/ceilometer-0" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.331519 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3aa4c5d7-28f2-4fa2-9430-4865754b335e-log-httpd\") pod \"ceilometer-0\" (UID: \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\") " pod="openstack/ceilometer-0" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.331540 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aa4c5d7-28f2-4fa2-9430-4865754b335e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\") " pod="openstack/ceilometer-0" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.331618 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3aa4c5d7-28f2-4fa2-9430-4865754b335e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: 
\"3aa4c5d7-28f2-4fa2-9430-4865754b335e\") " pod="openstack/ceilometer-0" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.432849 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3aa4c5d7-28f2-4fa2-9430-4865754b335e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\") " pod="openstack/ceilometer-0" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.432923 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3aa4c5d7-28f2-4fa2-9430-4865754b335e-config-data\") pod \"ceilometer-0\" (UID: \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\") " pod="openstack/ceilometer-0" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.432946 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3aa4c5d7-28f2-4fa2-9430-4865754b335e-scripts\") pod \"ceilometer-0\" (UID: \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\") " pod="openstack/ceilometer-0" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.432972 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pckj\" (UniqueName: \"kubernetes.io/projected/3aa4c5d7-28f2-4fa2-9430-4865754b335e-kube-api-access-5pckj\") pod \"ceilometer-0\" (UID: \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\") " pod="openstack/ceilometer-0" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.433000 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3aa4c5d7-28f2-4fa2-9430-4865754b335e-run-httpd\") pod \"ceilometer-0\" (UID: \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\") " pod="openstack/ceilometer-0" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.433035 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3aa4c5d7-28f2-4fa2-9430-4865754b335e-log-httpd\") pod \"ceilometer-0\" (UID: \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\") " pod="openstack/ceilometer-0" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.433050 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aa4c5d7-28f2-4fa2-9430-4865754b335e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\") " pod="openstack/ceilometer-0" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.436831 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3aa4c5d7-28f2-4fa2-9430-4865754b335e-run-httpd\") pod \"ceilometer-0\" (UID: \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\") " pod="openstack/ceilometer-0" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.436906 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3aa4c5d7-28f2-4fa2-9430-4865754b335e-log-httpd\") pod \"ceilometer-0\" (UID: \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\") " pod="openstack/ceilometer-0" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.437674 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aa4c5d7-28f2-4fa2-9430-4865754b335e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\") " 
pod="openstack/ceilometer-0" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.437680 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3aa4c5d7-28f2-4fa2-9430-4865754b335e-scripts\") pod \"ceilometer-0\" (UID: \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\") " pod="openstack/ceilometer-0" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.440042 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3aa4c5d7-28f2-4fa2-9430-4865754b335e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\") " pod="openstack/ceilometer-0" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.448007 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3aa4c5d7-28f2-4fa2-9430-4865754b335e-config-data\") pod \"ceilometer-0\" (UID: \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\") " pod="openstack/ceilometer-0" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.459311 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pckj\" (UniqueName: \"kubernetes.io/projected/3aa4c5d7-28f2-4fa2-9430-4865754b335e-kube-api-access-5pckj\") pod \"ceilometer-0\" (UID: \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\") " pod="openstack/ceilometer-0" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.515432 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.590501 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-w75bv" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.737605 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxlq6\" (UniqueName: \"kubernetes.io/projected/fde8edd5-50e0-4bb0-8701-54e0998444a1-kube-api-access-xxlq6\") pod \"fde8edd5-50e0-4bb0-8701-54e0998444a1\" (UID: \"fde8edd5-50e0-4bb0-8701-54e0998444a1\") " Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.738020 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fde8edd5-50e0-4bb0-8701-54e0998444a1-config-data\") pod \"fde8edd5-50e0-4bb0-8701-54e0998444a1\" (UID: \"fde8edd5-50e0-4bb0-8701-54e0998444a1\") " Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.738061 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fde8edd5-50e0-4bb0-8701-54e0998444a1-scripts\") pod \"fde8edd5-50e0-4bb0-8701-54e0998444a1\" (UID: \"fde8edd5-50e0-4bb0-8701-54e0998444a1\") " Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.738100 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fde8edd5-50e0-4bb0-8701-54e0998444a1-etc-machine-id\") pod \"fde8edd5-50e0-4bb0-8701-54e0998444a1\" (UID: \"fde8edd5-50e0-4bb0-8701-54e0998444a1\") " Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.738144 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fde8edd5-50e0-4bb0-8701-54e0998444a1-combined-ca-bundle\") pod \"fde8edd5-50e0-4bb0-8701-54e0998444a1\" (UID: \"fde8edd5-50e0-4bb0-8701-54e0998444a1\") " Feb 03 07:28:41 crc 
kubenswrapper[4708]: I0203 07:28:41.738224 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/fde8edd5-50e0-4bb0-8701-54e0998444a1-db-sync-config-data\") pod \"fde8edd5-50e0-4bb0-8701-54e0998444a1\" (UID: \"fde8edd5-50e0-4bb0-8701-54e0998444a1\") " Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.738483 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fde8edd5-50e0-4bb0-8701-54e0998444a1-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "fde8edd5-50e0-4bb0-8701-54e0998444a1" (UID: "fde8edd5-50e0-4bb0-8701-54e0998444a1"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.739071 4708 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fde8edd5-50e0-4bb0-8701-54e0998444a1-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.742083 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fde8edd5-50e0-4bb0-8701-54e0998444a1-kube-api-access-xxlq6" (OuterVolumeSpecName: "kube-api-access-xxlq6") pod "fde8edd5-50e0-4bb0-8701-54e0998444a1" (UID: "fde8edd5-50e0-4bb0-8701-54e0998444a1"). InnerVolumeSpecName "kube-api-access-xxlq6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.743096 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fde8edd5-50e0-4bb0-8701-54e0998444a1-scripts" (OuterVolumeSpecName: "scripts") pod "fde8edd5-50e0-4bb0-8701-54e0998444a1" (UID: "fde8edd5-50e0-4bb0-8701-54e0998444a1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.750003 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fde8edd5-50e0-4bb0-8701-54e0998444a1-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "fde8edd5-50e0-4bb0-8701-54e0998444a1" (UID: "fde8edd5-50e0-4bb0-8701-54e0998444a1"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.768271 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fde8edd5-50e0-4bb0-8701-54e0998444a1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fde8edd5-50e0-4bb0-8701-54e0998444a1" (UID: "fde8edd5-50e0-4bb0-8701-54e0998444a1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.783010 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fde8edd5-50e0-4bb0-8701-54e0998444a1-config-data" (OuterVolumeSpecName: "config-data") pod "fde8edd5-50e0-4bb0-8701-54e0998444a1" (UID: "fde8edd5-50e0-4bb0-8701-54e0998444a1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.841267 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fde8edd5-50e0-4bb0-8701-54e0998444a1-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.841301 4708 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fde8edd5-50e0-4bb0-8701-54e0998444a1-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.841315 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fde8edd5-50e0-4bb0-8701-54e0998444a1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.841328 4708 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/fde8edd5-50e0-4bb0-8701-54e0998444a1-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.841338 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxlq6\" (UniqueName: \"kubernetes.io/projected/fde8edd5-50e0-4bb0-8701-54e0998444a1-kube-api-access-xxlq6\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:41 crc kubenswrapper[4708]: I0203 07:28:41.956258 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.106233 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46305c26-487f-4187-a8ae-48fd2319d25a" path="/var/lib/kubelet/pods/46305c26-487f-4187-a8ae-48fd2319d25a/volumes" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.119666 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3aa4c5d7-28f2-4fa2-9430-4865754b335e","Type":"ContainerStarted","Data":"e18e7c42adce1072464179115012f13cda533dd77b189538923906d964166717"} Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.123228 4708 generic.go:334] "Generic (PLEG): container finished" podID="ec5c8613-c88b-4cc5-8ad4-440e65523618" containerID="45225be23011835ca772f604ecb495177b51e6431dce31d4b06618b8ccc1ac64" exitCode=0 Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.123298 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-db-sync-8w6c7" event={"ID":"ec5c8613-c88b-4cc5-8ad4-440e65523618","Type":"ContainerDied","Data":"45225be23011835ca772f604ecb495177b51e6431dce31d4b06618b8ccc1ac64"} Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.125503 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-w75bv" event={"ID":"fde8edd5-50e0-4bb0-8701-54e0998444a1","Type":"ContainerDied","Data":"6077bdda0c5e511ea0097263c84f0cb5a7e6f035b8d609e351a8b4d00aa59987"} Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.125533 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6077bdda0c5e511ea0097263c84f0cb5a7e6f035b8d609e351a8b4d00aa59987" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.125577 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-w75bv" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.387592 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Feb 03 07:28:42 crc kubenswrapper[4708]: E0203 07:28:42.388051 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fde8edd5-50e0-4bb0-8701-54e0998444a1" containerName="cinder-db-sync" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.388074 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="fde8edd5-50e0-4bb0-8701-54e0998444a1" containerName="cinder-db-sync" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.388545 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="fde8edd5-50e0-4bb0-8701-54e0998444a1" containerName="cinder-db-sync" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.389692 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.395484 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.395588 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.398073 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.398340 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-jlkn6" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.416362 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.481970 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-b8p62"] Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.483215 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.501457 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-b8p62"] Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.552860 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svrdd\" (UniqueName: \"kubernetes.io/projected/87d3a294-c4b6-4ddf-9f60-c6afede1752a-kube-api-access-svrdd\") pod \"cinder-scheduler-0\" (UID: \"87d3a294-c4b6-4ddf-9f60-c6afede1752a\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.552925 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/87d3a294-c4b6-4ddf-9f60-c6afede1752a-scripts\") pod \"cinder-scheduler-0\" (UID: \"87d3a294-c4b6-4ddf-9f60-c6afede1752a\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.552943 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/87d3a294-c4b6-4ddf-9f60-c6afede1752a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"87d3a294-c4b6-4ddf-9f60-c6afede1752a\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.553064 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87d3a294-c4b6-4ddf-9f60-c6afede1752a-config-data\") pod \"cinder-scheduler-0\" (UID: \"87d3a294-c4b6-4ddf-9f60-c6afede1752a\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.553182 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87d3a294-c4b6-4ddf-9f60-c6afede1752a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"87d3a294-c4b6-4ddf-9f60-c6afede1752a\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.553240 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/87d3a294-c4b6-4ddf-9f60-c6afede1752a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"87d3a294-c4b6-4ddf-9f60-c6afede1752a\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.654301 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87d3a294-c4b6-4ddf-9f60-c6afede1752a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"87d3a294-c4b6-4ddf-9f60-c6afede1752a\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.654360 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d80380a3-aaeb-40d6-a30f-ced06d3885d4-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-b8p62\" (UID: \"d80380a3-aaeb-40d6-a30f-ced06d3885d4\") " pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.654386 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/87d3a294-c4b6-4ddf-9f60-c6afede1752a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"87d3a294-c4b6-4ddf-9f60-c6afede1752a\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.654410 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svrdd\" (UniqueName: \"kubernetes.io/projected/87d3a294-c4b6-4ddf-9f60-c6afede1752a-kube-api-access-svrdd\") pod \"cinder-scheduler-0\" (UID: \"87d3a294-c4b6-4ddf-9f60-c6afede1752a\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.654435 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d80380a3-aaeb-40d6-a30f-ced06d3885d4-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-b8p62\" (UID: \"d80380a3-aaeb-40d6-a30f-ced06d3885d4\") " pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.654455 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d80380a3-aaeb-40d6-a30f-ced06d3885d4-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-b8p62\" (UID: \"d80380a3-aaeb-40d6-a30f-ced06d3885d4\") " pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.654505 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/87d3a294-c4b6-4ddf-9f60-c6afede1752a-scripts\") pod \"cinder-scheduler-0\" (UID: \"87d3a294-c4b6-4ddf-9f60-c6afede1752a\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.654523 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/87d3a294-c4b6-4ddf-9f60-c6afede1752a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"87d3a294-c4b6-4ddf-9f60-c6afede1752a\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.654560 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87d3a294-c4b6-4ddf-9f60-c6afede1752a-config-data\") pod \"cinder-scheduler-0\" (UID: \"87d3a294-c4b6-4ddf-9f60-c6afede1752a\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.654593 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d80380a3-aaeb-40d6-a30f-ced06d3885d4-config\") pod \"dnsmasq-dns-5c9776ccc5-b8p62\" (UID: \"d80380a3-aaeb-40d6-a30f-ced06d3885d4\") " pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.654611 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d80380a3-aaeb-40d6-a30f-ced06d3885d4-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-b8p62\" (UID: \"d80380a3-aaeb-40d6-a30f-ced06d3885d4\") " pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.654636 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zvqg\" (UniqueName: \"kubernetes.io/projected/d80380a3-aaeb-40d6-a30f-ced06d3885d4-kube-api-access-2zvqg\") 
pod \"dnsmasq-dns-5c9776ccc5-b8p62\" (UID: \"d80380a3-aaeb-40d6-a30f-ced06d3885d4\") " pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.654975 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/87d3a294-c4b6-4ddf-9f60-c6afede1752a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"87d3a294-c4b6-4ddf-9f60-c6afede1752a\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.660878 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/87d3a294-c4b6-4ddf-9f60-c6afede1752a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"87d3a294-c4b6-4ddf-9f60-c6afede1752a\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.660900 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/87d3a294-c4b6-4ddf-9f60-c6afede1752a-scripts\") pod \"cinder-scheduler-0\" (UID: \"87d3a294-c4b6-4ddf-9f60-c6afede1752a\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.663300 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87d3a294-c4b6-4ddf-9f60-c6afede1752a-config-data\") pod \"cinder-scheduler-0\" (UID: \"87d3a294-c4b6-4ddf-9f60-c6afede1752a\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.663335 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87d3a294-c4b6-4ddf-9f60-c6afede1752a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"87d3a294-c4b6-4ddf-9f60-c6afede1752a\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.674854 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.676471 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.678209 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.680387 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svrdd\" (UniqueName: \"kubernetes.io/projected/87d3a294-c4b6-4ddf-9f60-c6afede1752a-kube-api-access-svrdd\") pod \"cinder-scheduler-0\" (UID: \"87d3a294-c4b6-4ddf-9f60-c6afede1752a\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.715765 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.724075 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.757203 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d80380a3-aaeb-40d6-a30f-ced06d3885d4-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-b8p62\" (UID: \"d80380a3-aaeb-40d6-a30f-ced06d3885d4\") " pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.757299 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d80380a3-aaeb-40d6-a30f-ced06d3885d4-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-b8p62\" (UID: \"d80380a3-aaeb-40d6-a30f-ced06d3885d4\") " pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.757325 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d80380a3-aaeb-40d6-a30f-ced06d3885d4-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-b8p62\" (UID: \"d80380a3-aaeb-40d6-a30f-ced06d3885d4\") " pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.757436 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d80380a3-aaeb-40d6-a30f-ced06d3885d4-config\") pod \"dnsmasq-dns-5c9776ccc5-b8p62\" (UID: \"d80380a3-aaeb-40d6-a30f-ced06d3885d4\") " pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.757459 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d80380a3-aaeb-40d6-a30f-ced06d3885d4-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-b8p62\" (UID: \"d80380a3-aaeb-40d6-a30f-ced06d3885d4\") " pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.757515 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zvqg\" (UniqueName: \"kubernetes.io/projected/d80380a3-aaeb-40d6-a30f-ced06d3885d4-kube-api-access-2zvqg\") pod \"dnsmasq-dns-5c9776ccc5-b8p62\" (UID: \"d80380a3-aaeb-40d6-a30f-ced06d3885d4\") " pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.758371 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d80380a3-aaeb-40d6-a30f-ced06d3885d4-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-b8p62\" (UID: \"d80380a3-aaeb-40d6-a30f-ced06d3885d4\") " pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.758530 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d80380a3-aaeb-40d6-a30f-ced06d3885d4-config\") pod \"dnsmasq-dns-5c9776ccc5-b8p62\" (UID: \"d80380a3-aaeb-40d6-a30f-ced06d3885d4\") " pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.758667 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d80380a3-aaeb-40d6-a30f-ced06d3885d4-ovsdbserver-nb\") pod 
\"dnsmasq-dns-5c9776ccc5-b8p62\" (UID: \"d80380a3-aaeb-40d6-a30f-ced06d3885d4\") " pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.759037 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d80380a3-aaeb-40d6-a30f-ced06d3885d4-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-b8p62\" (UID: \"d80380a3-aaeb-40d6-a30f-ced06d3885d4\") " pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.759613 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d80380a3-aaeb-40d6-a30f-ced06d3885d4-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-b8p62\" (UID: \"d80380a3-aaeb-40d6-a30f-ced06d3885d4\") " pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.781475 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zvqg\" (UniqueName: \"kubernetes.io/projected/d80380a3-aaeb-40d6-a30f-ced06d3885d4-kube-api-access-2zvqg\") pod \"dnsmasq-dns-5c9776ccc5-b8p62\" (UID: \"d80380a3-aaeb-40d6-a30f-ced06d3885d4\") " pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.852866 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.861671 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b3ffce46-abee-4152-a945-f60062643ac0-etc-machine-id\") pod \"cinder-api-0\" (UID: \"b3ffce46-abee-4152-a945-f60062643ac0\") " pod="openstack/cinder-api-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.861741 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b3ffce46-abee-4152-a945-f60062643ac0-config-data-custom\") pod \"cinder-api-0\" (UID: \"b3ffce46-abee-4152-a945-f60062643ac0\") " pod="openstack/cinder-api-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.861849 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zk9gm\" (UniqueName: \"kubernetes.io/projected/b3ffce46-abee-4152-a945-f60062643ac0-kube-api-access-zk9gm\") pod \"cinder-api-0\" (UID: \"b3ffce46-abee-4152-a945-f60062643ac0\") " pod="openstack/cinder-api-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.861903 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3ffce46-abee-4152-a945-f60062643ac0-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"b3ffce46-abee-4152-a945-f60062643ac0\") " pod="openstack/cinder-api-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.861969 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3ffce46-abee-4152-a945-f60062643ac0-scripts\") pod \"cinder-api-0\" (UID: \"b3ffce46-abee-4152-a945-f60062643ac0\") " pod="openstack/cinder-api-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.862100 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/b3ffce46-abee-4152-a945-f60062643ac0-logs\") pod \"cinder-api-0\" (UID: \"b3ffce46-abee-4152-a945-f60062643ac0\") " pod="openstack/cinder-api-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.862178 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3ffce46-abee-4152-a945-f60062643ac0-config-data\") pod \"cinder-api-0\" (UID: \"b3ffce46-abee-4152-a945-f60062643ac0\") " pod="openstack/cinder-api-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.965056 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3ffce46-abee-4152-a945-f60062643ac0-scripts\") pod \"cinder-api-0\" (UID: \"b3ffce46-abee-4152-a945-f60062643ac0\") " pod="openstack/cinder-api-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.965400 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3ffce46-abee-4152-a945-f60062643ac0-logs\") pod \"cinder-api-0\" (UID: \"b3ffce46-abee-4152-a945-f60062643ac0\") " pod="openstack/cinder-api-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.965467 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3ffce46-abee-4152-a945-f60062643ac0-config-data\") pod \"cinder-api-0\" (UID: \"b3ffce46-abee-4152-a945-f60062643ac0\") " pod="openstack/cinder-api-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.965521 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b3ffce46-abee-4152-a945-f60062643ac0-etc-machine-id\") pod \"cinder-api-0\" (UID: \"b3ffce46-abee-4152-a945-f60062643ac0\") " pod="openstack/cinder-api-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.965549 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b3ffce46-abee-4152-a945-f60062643ac0-config-data-custom\") pod \"cinder-api-0\" (UID: \"b3ffce46-abee-4152-a945-f60062643ac0\") " pod="openstack/cinder-api-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.965605 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zk9gm\" (UniqueName: \"kubernetes.io/projected/b3ffce46-abee-4152-a945-f60062643ac0-kube-api-access-zk9gm\") pod \"cinder-api-0\" (UID: \"b3ffce46-abee-4152-a945-f60062643ac0\") " pod="openstack/cinder-api-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.965646 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3ffce46-abee-4152-a945-f60062643ac0-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"b3ffce46-abee-4152-a945-f60062643ac0\") " pod="openstack/cinder-api-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.985480 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3ffce46-abee-4152-a945-f60062643ac0-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"b3ffce46-abee-4152-a945-f60062643ac0\") " pod="openstack/cinder-api-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.985833 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/b3ffce46-abee-4152-a945-f60062643ac0-logs\") pod \"cinder-api-0\" (UID: \"b3ffce46-abee-4152-a945-f60062643ac0\") " pod="openstack/cinder-api-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.986863 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b3ffce46-abee-4152-a945-f60062643ac0-etc-machine-id\") pod \"cinder-api-0\" (UID: \"b3ffce46-abee-4152-a945-f60062643ac0\") " pod="openstack/cinder-api-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.990484 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3ffce46-abee-4152-a945-f60062643ac0-config-data\") pod \"cinder-api-0\" (UID: \"b3ffce46-abee-4152-a945-f60062643ac0\") " pod="openstack/cinder-api-0" Feb 03 07:28:42 crc kubenswrapper[4708]: I0203 07:28:42.998624 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3ffce46-abee-4152-a945-f60062643ac0-scripts\") pod \"cinder-api-0\" (UID: \"b3ffce46-abee-4152-a945-f60062643ac0\") " pod="openstack/cinder-api-0" Feb 03 07:28:43 crc kubenswrapper[4708]: I0203 07:28:43.001212 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b3ffce46-abee-4152-a945-f60062643ac0-config-data-custom\") pod \"cinder-api-0\" (UID: \"b3ffce46-abee-4152-a945-f60062643ac0\") " pod="openstack/cinder-api-0" Feb 03 07:28:43 crc kubenswrapper[4708]: I0203 07:28:43.015355 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zk9gm\" (UniqueName: \"kubernetes.io/projected/b3ffce46-abee-4152-a945-f60062643ac0-kube-api-access-zk9gm\") pod \"cinder-api-0\" (UID: \"b3ffce46-abee-4152-a945-f60062643ac0\") " pod="openstack/cinder-api-0" Feb 03 07:28:43 crc kubenswrapper[4708]: I0203 07:28:43.149234 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3aa4c5d7-28f2-4fa2-9430-4865754b335e","Type":"ContainerStarted","Data":"49f614fc3555c349bf26cd79ba6a6429ff608907148876b7afc98942341e68fc"} Feb 03 07:28:43 crc kubenswrapper[4708]: I0203 07:28:43.296078 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 03 07:28:43 crc kubenswrapper[4708]: W0203 07:28:43.300944 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod87d3a294_c4b6_4ddf_9f60_c6afede1752a.slice/crio-1f39bdfcf9124b095f03f5e2066d27f5cf8147e14a13269c2d7e13db3a4471c9 WatchSource:0}: Error finding container 1f39bdfcf9124b095f03f5e2066d27f5cf8147e14a13269c2d7e13db3a4471c9: Status 404 returned error can't find the container with id 1f39bdfcf9124b095f03f5e2066d27f5cf8147e14a13269c2d7e13db3a4471c9 Feb 03 07:28:43 crc kubenswrapper[4708]: I0203 07:28:43.312561 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 03 07:28:43 crc kubenswrapper[4708]: I0203 07:28:43.404764 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-b8p62"] Feb 03 07:28:43 crc kubenswrapper[4708]: I0203 07:28:43.480437 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-db-sync-8w6c7" Feb 03 07:28:43 crc kubenswrapper[4708]: I0203 07:28:43.679253 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/ec5c8613-c88b-4cc5-8ad4-440e65523618-etc-podinfo\") pod \"ec5c8613-c88b-4cc5-8ad4-440e65523618\" (UID: \"ec5c8613-c88b-4cc5-8ad4-440e65523618\") " Feb 03 07:28:43 crc kubenswrapper[4708]: I0203 07:28:43.679317 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99r6w\" (UniqueName: \"kubernetes.io/projected/ec5c8613-c88b-4cc5-8ad4-440e65523618-kube-api-access-99r6w\") pod \"ec5c8613-c88b-4cc5-8ad4-440e65523618\" (UID: \"ec5c8613-c88b-4cc5-8ad4-440e65523618\") " Feb 03 07:28:43 crc kubenswrapper[4708]: I0203 07:28:43.679345 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec5c8613-c88b-4cc5-8ad4-440e65523618-combined-ca-bundle\") pod \"ec5c8613-c88b-4cc5-8ad4-440e65523618\" (UID: \"ec5c8613-c88b-4cc5-8ad4-440e65523618\") " Feb 03 07:28:43 crc kubenswrapper[4708]: I0203 07:28:43.679498 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec5c8613-c88b-4cc5-8ad4-440e65523618-config-data\") pod \"ec5c8613-c88b-4cc5-8ad4-440e65523618\" (UID: \"ec5c8613-c88b-4cc5-8ad4-440e65523618\") " Feb 03 07:28:43 crc kubenswrapper[4708]: I0203 07:28:43.679553 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/ec5c8613-c88b-4cc5-8ad4-440e65523618-config-data-merged\") pod \"ec5c8613-c88b-4cc5-8ad4-440e65523618\" (UID: \"ec5c8613-c88b-4cc5-8ad4-440e65523618\") " Feb 03 07:28:43 crc kubenswrapper[4708]: I0203 07:28:43.679608 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec5c8613-c88b-4cc5-8ad4-440e65523618-scripts\") pod \"ec5c8613-c88b-4cc5-8ad4-440e65523618\" (UID: \"ec5c8613-c88b-4cc5-8ad4-440e65523618\") " Feb 03 07:28:43 crc kubenswrapper[4708]: I0203 07:28:43.683117 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec5c8613-c88b-4cc5-8ad4-440e65523618-scripts" (OuterVolumeSpecName: "scripts") pod "ec5c8613-c88b-4cc5-8ad4-440e65523618" (UID: "ec5c8613-c88b-4cc5-8ad4-440e65523618"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:43 crc kubenswrapper[4708]: I0203 07:28:43.683444 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec5c8613-c88b-4cc5-8ad4-440e65523618-config-data-merged" (OuterVolumeSpecName: "config-data-merged") pod "ec5c8613-c88b-4cc5-8ad4-440e65523618" (UID: "ec5c8613-c88b-4cc5-8ad4-440e65523618"). InnerVolumeSpecName "config-data-merged". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:28:43 crc kubenswrapper[4708]: I0203 07:28:43.686364 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/ec5c8613-c88b-4cc5-8ad4-440e65523618-etc-podinfo" (OuterVolumeSpecName: "etc-podinfo") pod "ec5c8613-c88b-4cc5-8ad4-440e65523618" (UID: "ec5c8613-c88b-4cc5-8ad4-440e65523618"). InnerVolumeSpecName "etc-podinfo". 
PluginName "kubernetes.io/downward-api", VolumeGidValue "" Feb 03 07:28:43 crc kubenswrapper[4708]: I0203 07:28:43.688041 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec5c8613-c88b-4cc5-8ad4-440e65523618-kube-api-access-99r6w" (OuterVolumeSpecName: "kube-api-access-99r6w") pod "ec5c8613-c88b-4cc5-8ad4-440e65523618" (UID: "ec5c8613-c88b-4cc5-8ad4-440e65523618"). InnerVolumeSpecName "kube-api-access-99r6w". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:28:43 crc kubenswrapper[4708]: I0203 07:28:43.709778 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec5c8613-c88b-4cc5-8ad4-440e65523618-config-data" (OuterVolumeSpecName: "config-data") pod "ec5c8613-c88b-4cc5-8ad4-440e65523618" (UID: "ec5c8613-c88b-4cc5-8ad4-440e65523618"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:43 crc kubenswrapper[4708]: I0203 07:28:43.737623 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec5c8613-c88b-4cc5-8ad4-440e65523618-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ec5c8613-c88b-4cc5-8ad4-440e65523618" (UID: "ec5c8613-c88b-4cc5-8ad4-440e65523618"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:43 crc kubenswrapper[4708]: I0203 07:28:43.781082 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec5c8613-c88b-4cc5-8ad4-440e65523618-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:43 crc kubenswrapper[4708]: I0203 07:28:43.781117 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec5c8613-c88b-4cc5-8ad4-440e65523618-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:43 crc kubenswrapper[4708]: I0203 07:28:43.781129 4708 reconciler_common.go:293] "Volume detached for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/ec5c8613-c88b-4cc5-8ad4-440e65523618-config-data-merged\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:43 crc kubenswrapper[4708]: I0203 07:28:43.781137 4708 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec5c8613-c88b-4cc5-8ad4-440e65523618-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:43 crc kubenswrapper[4708]: I0203 07:28:43.781145 4708 reconciler_common.go:293] "Volume detached for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/ec5c8613-c88b-4cc5-8ad4-440e65523618-etc-podinfo\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:43 crc kubenswrapper[4708]: I0203 07:28:43.781156 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-99r6w\" (UniqueName: \"kubernetes.io/projected/ec5c8613-c88b-4cc5-8ad4-440e65523618-kube-api-access-99r6w\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:43 crc kubenswrapper[4708]: I0203 07:28:43.841560 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 03 07:28:43 crc kubenswrapper[4708]: W0203 07:28:43.861134 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb3ffce46_abee_4152_a945_f60062643ac0.slice/crio-64be77b5931ae2c84bdd136c5b80aaffa5339bd1c96ab175cccba834eb17ac81 WatchSource:0}: Error finding container 
64be77b5931ae2c84bdd136c5b80aaffa5339bd1c96ab175cccba834eb17ac81: Status 404 returned error can't find the container with id 64be77b5931ae2c84bdd136c5b80aaffa5339bd1c96ab175cccba834eb17ac81 Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.172949 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-67599f68dd-cgvwn" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.288284 4708 generic.go:334] "Generic (PLEG): container finished" podID="d80380a3-aaeb-40d6-a30f-ced06d3885d4" containerID="338b88f454f739a8b05c9ab18020e42ffcabd48f9599a383feba57c88d6f8dd0" exitCode=0 Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.288401 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" event={"ID":"d80380a3-aaeb-40d6-a30f-ced06d3885d4","Type":"ContainerDied","Data":"338b88f454f739a8b05c9ab18020e42ffcabd48f9599a383feba57c88d6f8dd0"} Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.288434 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" event={"ID":"d80380a3-aaeb-40d6-a30f-ced06d3885d4","Type":"ContainerStarted","Data":"63a136526da9ddb726feded4fe2f9e113debae7f3f9005736512bf2086ae501b"} Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.304617 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-db-sync-8w6c7" event={"ID":"ec5c8613-c88b-4cc5-8ad4-440e65523618","Type":"ContainerDied","Data":"785ab40b156d9f85aab9411f0c2c054ce53a59dc920e95658ed731c3ecddcf3d"} Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.304653 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="785ab40b156d9f85aab9411f0c2c054ce53a59dc920e95658ed731c3ecddcf3d" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.304736 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-db-sync-8w6c7" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.327851 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3aa4c5d7-28f2-4fa2-9430-4865754b335e","Type":"ContainerStarted","Data":"b45de0da0fc624bc80579b5ec95c09dc58036ec34f442d9ab4fec7468a543f23"} Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.327917 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3aa4c5d7-28f2-4fa2-9430-4865754b335e","Type":"ContainerStarted","Data":"03b075fb16dc6bb72666761a9bf466dc0cf6610a5e3ad65335540ced0501e8f9"} Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.390943 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"87d3a294-c4b6-4ddf-9f60-c6afede1752a","Type":"ContainerStarted","Data":"1f39bdfcf9124b095f03f5e2066d27f5cf8147e14a13269c2d7e13db3a4471c9"} Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.418902 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b3ffce46-abee-4152-a945-f60062643ac0","Type":"ContainerStarted","Data":"64be77b5931ae2c84bdd136c5b80aaffa5339bd1c96ab175cccba834eb17ac81"} Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.479437 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.576219 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ironic-neutron-agent-95b7948fb-x2nkv"] Feb 03 07:28:44 crc kubenswrapper[4708]: E0203 07:28:44.576814 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec5c8613-c88b-4cc5-8ad4-440e65523618" containerName="init" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.576833 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec5c8613-c88b-4cc5-8ad4-440e65523618" containerName="init" Feb 03 07:28:44 crc kubenswrapper[4708]: E0203 07:28:44.576861 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec5c8613-c88b-4cc5-8ad4-440e65523618" containerName="ironic-db-sync" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.576870 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec5c8613-c88b-4cc5-8ad4-440e65523618" containerName="ironic-db-sync" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.577180 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec5c8613-c88b-4cc5-8ad4-440e65523618" containerName="ironic-db-sync" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.578106 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-neutron-agent-95b7948fb-x2nkv" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.591614 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-ironic-dockercfg-5lpxl" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.591822 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-ironic-neutron-agent-config-data" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.632722 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8882c\" (UniqueName: \"kubernetes.io/projected/aeb72dfd-3f7b-41fa-882f-3290c463fcbe-kube-api-access-8882c\") pod \"ironic-neutron-agent-95b7948fb-x2nkv\" (UID: \"aeb72dfd-3f7b-41fa-882f-3290c463fcbe\") " pod="openstack/ironic-neutron-agent-95b7948fb-x2nkv" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.633035 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/aeb72dfd-3f7b-41fa-882f-3290c463fcbe-config\") pod \"ironic-neutron-agent-95b7948fb-x2nkv\" (UID: \"aeb72dfd-3f7b-41fa-882f-3290c463fcbe\") " pod="openstack/ironic-neutron-agent-95b7948fb-x2nkv" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.633090 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aeb72dfd-3f7b-41fa-882f-3290c463fcbe-combined-ca-bundle\") pod \"ironic-neutron-agent-95b7948fb-x2nkv\" (UID: \"aeb72dfd-3f7b-41fa-882f-3290c463fcbe\") " pod="openstack/ironic-neutron-agent-95b7948fb-x2nkv" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.704679 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-neutron-agent-95b7948fb-x2nkv"] Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.737106 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8882c\" (UniqueName: \"kubernetes.io/projected/aeb72dfd-3f7b-41fa-882f-3290c463fcbe-kube-api-access-8882c\") pod \"ironic-neutron-agent-95b7948fb-x2nkv\" (UID: \"aeb72dfd-3f7b-41fa-882f-3290c463fcbe\") " pod="openstack/ironic-neutron-agent-95b7948fb-x2nkv" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.737149 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/aeb72dfd-3f7b-41fa-882f-3290c463fcbe-config\") pod \"ironic-neutron-agent-95b7948fb-x2nkv\" (UID: \"aeb72dfd-3f7b-41fa-882f-3290c463fcbe\") " pod="openstack/ironic-neutron-agent-95b7948fb-x2nkv" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.737194 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aeb72dfd-3f7b-41fa-882f-3290c463fcbe-combined-ca-bundle\") pod \"ironic-neutron-agent-95b7948fb-x2nkv\" (UID: \"aeb72dfd-3f7b-41fa-882f-3290c463fcbe\") " pod="openstack/ironic-neutron-agent-95b7948fb-x2nkv" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.745596 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/aeb72dfd-3f7b-41fa-882f-3290c463fcbe-config\") pod \"ironic-neutron-agent-95b7948fb-x2nkv\" (UID: \"aeb72dfd-3f7b-41fa-882f-3290c463fcbe\") " pod="openstack/ironic-neutron-agent-95b7948fb-x2nkv" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.791531 4708 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aeb72dfd-3f7b-41fa-882f-3290c463fcbe-combined-ca-bundle\") pod \"ironic-neutron-agent-95b7948fb-x2nkv\" (UID: \"aeb72dfd-3f7b-41fa-882f-3290c463fcbe\") " pod="openstack/ironic-neutron-agent-95b7948fb-x2nkv" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.791596 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ironic-inspector-db-create-xtlsr"] Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.794371 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8882c\" (UniqueName: \"kubernetes.io/projected/aeb72dfd-3f7b-41fa-882f-3290c463fcbe-kube-api-access-8882c\") pod \"ironic-neutron-agent-95b7948fb-x2nkv\" (UID: \"aeb72dfd-3f7b-41fa-882f-3290c463fcbe\") " pod="openstack/ironic-neutron-agent-95b7948fb-x2nkv" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.813940 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-inspector-db-create-xtlsr" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.824698 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-inspector-db-create-xtlsr"] Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.841074 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5853c69c-a64c-449e-ab86-1fcb400ddc60-operator-scripts\") pod \"ironic-inspector-db-create-xtlsr\" (UID: \"5853c69c-a64c-449e-ab86-1fcb400ddc60\") " pod="openstack/ironic-inspector-db-create-xtlsr" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.841122 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rpqrh\" (UniqueName: \"kubernetes.io/projected/5853c69c-a64c-449e-ab86-1fcb400ddc60-kube-api-access-rpqrh\") pod \"ironic-inspector-db-create-xtlsr\" (UID: \"5853c69c-a64c-449e-ab86-1fcb400ddc60\") " pod="openstack/ironic-inspector-db-create-xtlsr" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.865258 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ironic-inspector-e3db-account-create-update-cm9n8"] Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.874629 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-inspector-e3db-account-create-update-cm9n8" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.889145 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-inspector-db-secret" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.906912 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-inspector-e3db-account-create-update-cm9n8"] Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.929292 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ironic-fc5cbdf64-524m4"] Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.931214 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-fc5cbdf64-524m4" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.944891 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-api-config-data" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.944941 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-config-data" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.944901 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.945101 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-api-scripts" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.946237 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5853c69c-a64c-449e-ab86-1fcb400ddc60-operator-scripts\") pod \"ironic-inspector-db-create-xtlsr\" (UID: \"5853c69c-a64c-449e-ab86-1fcb400ddc60\") " pod="openstack/ironic-inspector-db-create-xtlsr" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.946258 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rpqrh\" (UniqueName: \"kubernetes.io/projected/5853c69c-a64c-449e-ab86-1fcb400ddc60-kube-api-access-rpqrh\") pod \"ironic-inspector-db-create-xtlsr\" (UID: \"5853c69c-a64c-449e-ab86-1fcb400ddc60\") " pod="openstack/ironic-inspector-db-create-xtlsr" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.947161 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5853c69c-a64c-449e-ab86-1fcb400ddc60-operator-scripts\") pod \"ironic-inspector-db-create-xtlsr\" (UID: \"5853c69c-a64c-449e-ab86-1fcb400ddc60\") " pod="openstack/ironic-inspector-db-create-xtlsr" Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.953871 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-fc5cbdf64-524m4"] Feb 03 07:28:44 crc kubenswrapper[4708]: I0203 07:28:44.973207 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-neutron-agent-95b7948fb-x2nkv" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.007372 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rpqrh\" (UniqueName: \"kubernetes.io/projected/5853c69c-a64c-449e-ab86-1fcb400ddc60-kube-api-access-rpqrh\") pod \"ironic-inspector-db-create-xtlsr\" (UID: \"5853c69c-a64c-449e-ab86-1fcb400ddc60\") " pod="openstack/ironic-inspector-db-create-xtlsr" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.048195 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d2f1707-b960-46ce-b412-6a16f8cc63c9-scripts\") pod \"ironic-fc5cbdf64-524m4\" (UID: \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " pod="openstack/ironic-fc5cbdf64-524m4" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.048435 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d2f1707-b960-46ce-b412-6a16f8cc63c9-combined-ca-bundle\") pod \"ironic-fc5cbdf64-524m4\" (UID: \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " pod="openstack/ironic-fc5cbdf64-524m4" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.048535 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5d2f1707-b960-46ce-b412-6a16f8cc63c9-config-data-custom\") pod \"ironic-fc5cbdf64-524m4\" (UID: \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " pod="openstack/ironic-fc5cbdf64-524m4" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.048658 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/5d2f1707-b960-46ce-b412-6a16f8cc63c9-etc-podinfo\") pod \"ironic-fc5cbdf64-524m4\" (UID: \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " pod="openstack/ironic-fc5cbdf64-524m4" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.048769 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94489\" (UniqueName: \"kubernetes.io/projected/5d2f1707-b960-46ce-b412-6a16f8cc63c9-kube-api-access-94489\") pod \"ironic-fc5cbdf64-524m4\" (UID: \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " pod="openstack/ironic-fc5cbdf64-524m4" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.048886 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9da48053-326c-45cb-bf1c-fb8890642e13-operator-scripts\") pod \"ironic-inspector-e3db-account-create-update-cm9n8\" (UID: \"9da48053-326c-45cb-bf1c-fb8890642e13\") " pod="openstack/ironic-inspector-e3db-account-create-update-cm9n8" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.049008 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8x7z\" (UniqueName: \"kubernetes.io/projected/9da48053-326c-45cb-bf1c-fb8890642e13-kube-api-access-d8x7z\") pod \"ironic-inspector-e3db-account-create-update-cm9n8\" (UID: \"9da48053-326c-45cb-bf1c-fb8890642e13\") " pod="openstack/ironic-inspector-e3db-account-create-update-cm9n8" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.049099 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/5d2f1707-b960-46ce-b412-6a16f8cc63c9-config-data-merged\") pod \"ironic-fc5cbdf64-524m4\" (UID: \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " pod="openstack/ironic-fc5cbdf64-524m4" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.049196 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d2f1707-b960-46ce-b412-6a16f8cc63c9-logs\") pod \"ironic-fc5cbdf64-524m4\" (UID: \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " pod="openstack/ironic-fc5cbdf64-524m4" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.049539 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d2f1707-b960-46ce-b412-6a16f8cc63c9-config-data\") pod \"ironic-fc5cbdf64-524m4\" (UID: \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " pod="openstack/ironic-fc5cbdf64-524m4" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.150724 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d2f1707-b960-46ce-b412-6a16f8cc63c9-scripts\") pod \"ironic-fc5cbdf64-524m4\" (UID: \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " pod="openstack/ironic-fc5cbdf64-524m4" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.150782 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d2f1707-b960-46ce-b412-6a16f8cc63c9-combined-ca-bundle\") pod \"ironic-fc5cbdf64-524m4\" (UID: \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " pod="openstack/ironic-fc5cbdf64-524m4" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.150822 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5d2f1707-b960-46ce-b412-6a16f8cc63c9-config-data-custom\") pod \"ironic-fc5cbdf64-524m4\" (UID: \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " pod="openstack/ironic-fc5cbdf64-524m4" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.150848 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/5d2f1707-b960-46ce-b412-6a16f8cc63c9-etc-podinfo\") pod \"ironic-fc5cbdf64-524m4\" (UID: \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " pod="openstack/ironic-fc5cbdf64-524m4" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.150904 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94489\" (UniqueName: \"kubernetes.io/projected/5d2f1707-b960-46ce-b412-6a16f8cc63c9-kube-api-access-94489\") pod \"ironic-fc5cbdf64-524m4\" (UID: \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " pod="openstack/ironic-fc5cbdf64-524m4" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.150937 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9da48053-326c-45cb-bf1c-fb8890642e13-operator-scripts\") pod \"ironic-inspector-e3db-account-create-update-cm9n8\" (UID: \"9da48053-326c-45cb-bf1c-fb8890642e13\") " pod="openstack/ironic-inspector-e3db-account-create-update-cm9n8" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.150963 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8x7z\" (UniqueName: 
\"kubernetes.io/projected/9da48053-326c-45cb-bf1c-fb8890642e13-kube-api-access-d8x7z\") pod \"ironic-inspector-e3db-account-create-update-cm9n8\" (UID: \"9da48053-326c-45cb-bf1c-fb8890642e13\") " pod="openstack/ironic-inspector-e3db-account-create-update-cm9n8" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.150983 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/5d2f1707-b960-46ce-b412-6a16f8cc63c9-config-data-merged\") pod \"ironic-fc5cbdf64-524m4\" (UID: \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " pod="openstack/ironic-fc5cbdf64-524m4" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.151012 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d2f1707-b960-46ce-b412-6a16f8cc63c9-logs\") pod \"ironic-fc5cbdf64-524m4\" (UID: \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " pod="openstack/ironic-fc5cbdf64-524m4" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.151042 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d2f1707-b960-46ce-b412-6a16f8cc63c9-config-data\") pod \"ironic-fc5cbdf64-524m4\" (UID: \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " pod="openstack/ironic-fc5cbdf64-524m4" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.155366 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9da48053-326c-45cb-bf1c-fb8890642e13-operator-scripts\") pod \"ironic-inspector-e3db-account-create-update-cm9n8\" (UID: \"9da48053-326c-45cb-bf1c-fb8890642e13\") " pod="openstack/ironic-inspector-e3db-account-create-update-cm9n8" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.161453 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d2f1707-b960-46ce-b412-6a16f8cc63c9-scripts\") pod \"ironic-fc5cbdf64-524m4\" (UID: \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " pod="openstack/ironic-fc5cbdf64-524m4" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.162350 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/5d2f1707-b960-46ce-b412-6a16f8cc63c9-config-data-merged\") pod \"ironic-fc5cbdf64-524m4\" (UID: \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " pod="openstack/ironic-fc5cbdf64-524m4" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.162430 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d2f1707-b960-46ce-b412-6a16f8cc63c9-logs\") pod \"ironic-fc5cbdf64-524m4\" (UID: \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " pod="openstack/ironic-fc5cbdf64-524m4" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.174499 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d2f1707-b960-46ce-b412-6a16f8cc63c9-combined-ca-bundle\") pod \"ironic-fc5cbdf64-524m4\" (UID: \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " pod="openstack/ironic-fc5cbdf64-524m4" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.175407 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5d2f1707-b960-46ce-b412-6a16f8cc63c9-config-data-custom\") pod \"ironic-fc5cbdf64-524m4\" (UID: 
\"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " pod="openstack/ironic-fc5cbdf64-524m4" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.177313 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/5d2f1707-b960-46ce-b412-6a16f8cc63c9-etc-podinfo\") pod \"ironic-fc5cbdf64-524m4\" (UID: \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " pod="openstack/ironic-fc5cbdf64-524m4" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.188524 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d8x7z\" (UniqueName: \"kubernetes.io/projected/9da48053-326c-45cb-bf1c-fb8890642e13-kube-api-access-d8x7z\") pod \"ironic-inspector-e3db-account-create-update-cm9n8\" (UID: \"9da48053-326c-45cb-bf1c-fb8890642e13\") " pod="openstack/ironic-inspector-e3db-account-create-update-cm9n8" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.198782 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94489\" (UniqueName: \"kubernetes.io/projected/5d2f1707-b960-46ce-b412-6a16f8cc63c9-kube-api-access-94489\") pod \"ironic-fc5cbdf64-524m4\" (UID: \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " pod="openstack/ironic-fc5cbdf64-524m4" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.200881 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d2f1707-b960-46ce-b412-6a16f8cc63c9-config-data\") pod \"ironic-fc5cbdf64-524m4\" (UID: \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " pod="openstack/ironic-fc5cbdf64-524m4" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.297358 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-inspector-db-create-xtlsr" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.484271 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-inspector-e3db-account-create-update-cm9n8" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.502261 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-fc5cbdf64-524m4" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.748450 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ironic-conductor-0"] Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.782180 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-conductor-0" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.787218 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-conductor-scripts" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.787867 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-conductor-config-data" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.792757 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-conductor-0"] Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.865270 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-neutron-agent-95b7948fb-x2nkv"] Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.975389 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhchh\" (UniqueName: \"kubernetes.io/projected/361821ae-c957-4e31-bb9b-6d659aaceec4-kube-api-access-dhchh\") pod \"ironic-conductor-0\" (UID: \"361821ae-c957-4e31-bb9b-6d659aaceec4\") " pod="openstack/ironic-conductor-0" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.975466 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/361821ae-c957-4e31-bb9b-6d659aaceec4-config-data\") pod \"ironic-conductor-0\" (UID: \"361821ae-c957-4e31-bb9b-6d659aaceec4\") " pod="openstack/ironic-conductor-0" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.975517 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/361821ae-c957-4e31-bb9b-6d659aaceec4-etc-podinfo\") pod \"ironic-conductor-0\" (UID: \"361821ae-c957-4e31-bb9b-6d659aaceec4\") " pod="openstack/ironic-conductor-0" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.975545 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/361821ae-c957-4e31-bb9b-6d659aaceec4-scripts\") pod \"ironic-conductor-0\" (UID: \"361821ae-c957-4e31-bb9b-6d659aaceec4\") " pod="openstack/ironic-conductor-0" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.975562 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/361821ae-c957-4e31-bb9b-6d659aaceec4-config-data-custom\") pod \"ironic-conductor-0\" (UID: \"361821ae-c957-4e31-bb9b-6d659aaceec4\") " pod="openstack/ironic-conductor-0" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.975593 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/361821ae-c957-4e31-bb9b-6d659aaceec4-config-data-merged\") pod \"ironic-conductor-0\" (UID: \"361821ae-c957-4e31-bb9b-6d659aaceec4\") " pod="openstack/ironic-conductor-0" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.975612 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ironic-conductor-0\" (UID: \"361821ae-c957-4e31-bb9b-6d659aaceec4\") " pod="openstack/ironic-conductor-0" Feb 03 07:28:45 crc kubenswrapper[4708]: I0203 07:28:45.975632 4708 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/361821ae-c957-4e31-bb9b-6d659aaceec4-combined-ca-bundle\") pod \"ironic-conductor-0\" (UID: \"361821ae-c957-4e31-bb9b-6d659aaceec4\") " pod="openstack/ironic-conductor-0" Feb 03 07:28:46 crc kubenswrapper[4708]: I0203 07:28:46.077600 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhchh\" (UniqueName: \"kubernetes.io/projected/361821ae-c957-4e31-bb9b-6d659aaceec4-kube-api-access-dhchh\") pod \"ironic-conductor-0\" (UID: \"361821ae-c957-4e31-bb9b-6d659aaceec4\") " pod="openstack/ironic-conductor-0" Feb 03 07:28:46 crc kubenswrapper[4708]: I0203 07:28:46.077954 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/361821ae-c957-4e31-bb9b-6d659aaceec4-config-data\") pod \"ironic-conductor-0\" (UID: \"361821ae-c957-4e31-bb9b-6d659aaceec4\") " pod="openstack/ironic-conductor-0" Feb 03 07:28:46 crc kubenswrapper[4708]: I0203 07:28:46.077980 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/361821ae-c957-4e31-bb9b-6d659aaceec4-etc-podinfo\") pod \"ironic-conductor-0\" (UID: \"361821ae-c957-4e31-bb9b-6d659aaceec4\") " pod="openstack/ironic-conductor-0" Feb 03 07:28:46 crc kubenswrapper[4708]: I0203 07:28:46.078006 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/361821ae-c957-4e31-bb9b-6d659aaceec4-scripts\") pod \"ironic-conductor-0\" (UID: \"361821ae-c957-4e31-bb9b-6d659aaceec4\") " pod="openstack/ironic-conductor-0" Feb 03 07:28:46 crc kubenswrapper[4708]: I0203 07:28:46.078023 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/361821ae-c957-4e31-bb9b-6d659aaceec4-config-data-custom\") pod \"ironic-conductor-0\" (UID: \"361821ae-c957-4e31-bb9b-6d659aaceec4\") " pod="openstack/ironic-conductor-0" Feb 03 07:28:46 crc kubenswrapper[4708]: I0203 07:28:46.078039 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/361821ae-c957-4e31-bb9b-6d659aaceec4-config-data-merged\") pod \"ironic-conductor-0\" (UID: \"361821ae-c957-4e31-bb9b-6d659aaceec4\") " pod="openstack/ironic-conductor-0" Feb 03 07:28:46 crc kubenswrapper[4708]: I0203 07:28:46.078055 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ironic-conductor-0\" (UID: \"361821ae-c957-4e31-bb9b-6d659aaceec4\") " pod="openstack/ironic-conductor-0" Feb 03 07:28:46 crc kubenswrapper[4708]: I0203 07:28:46.078077 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/361821ae-c957-4e31-bb9b-6d659aaceec4-combined-ca-bundle\") pod \"ironic-conductor-0\" (UID: \"361821ae-c957-4e31-bb9b-6d659aaceec4\") " pod="openstack/ironic-conductor-0" Feb 03 07:28:46 crc kubenswrapper[4708]: I0203 07:28:46.079354 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/361821ae-c957-4e31-bb9b-6d659aaceec4-config-data-merged\") pod \"ironic-conductor-0\" (UID: 
\"361821ae-c957-4e31-bb9b-6d659aaceec4\") " pod="openstack/ironic-conductor-0" Feb 03 07:28:46 crc kubenswrapper[4708]: I0203 07:28:46.081259 4708 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ironic-conductor-0\" (UID: \"361821ae-c957-4e31-bb9b-6d659aaceec4\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/ironic-conductor-0" Feb 03 07:28:46 crc kubenswrapper[4708]: I0203 07:28:46.087874 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/361821ae-c957-4e31-bb9b-6d659aaceec4-scripts\") pod \"ironic-conductor-0\" (UID: \"361821ae-c957-4e31-bb9b-6d659aaceec4\") " pod="openstack/ironic-conductor-0" Feb 03 07:28:46 crc kubenswrapper[4708]: I0203 07:28:46.093040 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/361821ae-c957-4e31-bb9b-6d659aaceec4-config-data\") pod \"ironic-conductor-0\" (UID: \"361821ae-c957-4e31-bb9b-6d659aaceec4\") " pod="openstack/ironic-conductor-0" Feb 03 07:28:46 crc kubenswrapper[4708]: I0203 07:28:46.112456 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/361821ae-c957-4e31-bb9b-6d659aaceec4-etc-podinfo\") pod \"ironic-conductor-0\" (UID: \"361821ae-c957-4e31-bb9b-6d659aaceec4\") " pod="openstack/ironic-conductor-0" Feb 03 07:28:46 crc kubenswrapper[4708]: I0203 07:28:46.134033 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/361821ae-c957-4e31-bb9b-6d659aaceec4-combined-ca-bundle\") pod \"ironic-conductor-0\" (UID: \"361821ae-c957-4e31-bb9b-6d659aaceec4\") " pod="openstack/ironic-conductor-0" Feb 03 07:28:46 crc kubenswrapper[4708]: I0203 07:28:46.136604 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/361821ae-c957-4e31-bb9b-6d659aaceec4-config-data-custom\") pod \"ironic-conductor-0\" (UID: \"361821ae-c957-4e31-bb9b-6d659aaceec4\") " pod="openstack/ironic-conductor-0" Feb 03 07:28:46 crc kubenswrapper[4708]: I0203 07:28:46.137152 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhchh\" (UniqueName: \"kubernetes.io/projected/361821ae-c957-4e31-bb9b-6d659aaceec4-kube-api-access-dhchh\") pod \"ironic-conductor-0\" (UID: \"361821ae-c957-4e31-bb9b-6d659aaceec4\") " pod="openstack/ironic-conductor-0" Feb 03 07:28:46 crc kubenswrapper[4708]: I0203 07:28:46.185547 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ironic-conductor-0\" (UID: \"361821ae-c957-4e31-bb9b-6d659aaceec4\") " pod="openstack/ironic-conductor-0" Feb 03 07:28:46 crc kubenswrapper[4708]: I0203 07:28:46.265191 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-inspector-db-create-xtlsr"] Feb 03 07:28:46 crc kubenswrapper[4708]: I0203 07:28:46.317378 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-conductor-0" Feb 03 07:28:46 crc kubenswrapper[4708]: I0203 07:28:46.448569 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-inspector-e3db-account-create-update-cm9n8"] Feb 03 07:28:46 crc kubenswrapper[4708]: I0203 07:28:46.518531 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-neutron-agent-95b7948fb-x2nkv" event={"ID":"aeb72dfd-3f7b-41fa-882f-3290c463fcbe","Type":"ContainerStarted","Data":"1286d750d00484327e95cffdeedc8a1b842ca4e8d10e8db81a5a5e57ff64c6d9"} Feb 03 07:28:46 crc kubenswrapper[4708]: I0203 07:28:46.522148 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-fc5cbdf64-524m4"] Feb 03 07:28:46 crc kubenswrapper[4708]: I0203 07:28:46.537763 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b3ffce46-abee-4152-a945-f60062643ac0","Type":"ContainerStarted","Data":"b96e730e2abb9e29a458888d0e5aa375cacdad60fe39f1bf169b0e8505019001"} Feb 03 07:28:46 crc kubenswrapper[4708]: I0203 07:28:46.584956 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" event={"ID":"d80380a3-aaeb-40d6-a30f-ced06d3885d4","Type":"ContainerStarted","Data":"f6b54db189c22f26b17e6f3aa38cceaedb4078610ed39e2b0b3a3ca86bbb4050"} Feb 03 07:28:46 crc kubenswrapper[4708]: I0203 07:28:46.585624 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" Feb 03 07:28:46 crc kubenswrapper[4708]: I0203 07:28:46.597602 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"87d3a294-c4b6-4ddf-9f60-c6afede1752a","Type":"ContainerStarted","Data":"817a73249ce67147d44e8e552c0fbbff5e8ad933ce2a428e698f7a8c56a8114e"} Feb 03 07:28:46 crc kubenswrapper[4708]: I0203 07:28:46.629010 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" podStartSLOduration=4.62898585 podStartE2EDuration="4.62898585s" podCreationTimestamp="2026-02-03 07:28:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:28:46.610134344 +0000 UTC m=+1105.592081151" watchObservedRunningTime="2026-02-03 07:28:46.62898585 +0000 UTC m=+1105.610932677" Feb 03 07:28:46 crc kubenswrapper[4708]: I0203 07:28:46.650017 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-db-create-xtlsr" event={"ID":"5853c69c-a64c-449e-ab86-1fcb400ddc60","Type":"ContainerStarted","Data":"fcb3ce1ddf9d5555947d4df50c956c30470f2c75105b284a5fd391f11da1d47e"} Feb 03 07:28:46 crc kubenswrapper[4708]: I0203 07:28:46.934816 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-conductor-0"] Feb 03 07:28:46 crc kubenswrapper[4708]: I0203 07:28:46.956564 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6f676fd47d-s9mvl" Feb 03 07:28:47 crc kubenswrapper[4708]: I0203 07:28:47.381444 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-7ddbc898b8-cqt5j" Feb 03 07:28:47 crc kubenswrapper[4708]: I0203 07:28:47.497114 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6f676fd47d-s9mvl" Feb 03 07:28:47 crc kubenswrapper[4708]: I0203 07:28:47.628389 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/barbican-api-5dd9656794-5cgwc"] Feb 03 07:28:47 crc kubenswrapper[4708]: I0203 07:28:47.628680 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5dd9656794-5cgwc" podUID="9492f917-fe6a-4076-a2f3-7d43ebee25e0" containerName="barbican-api-log" containerID="cri-o://42af5751558b8a73e0a6c6aab781465d1a6e689ae2d065a4641c8aef078c3d29" gracePeriod=30 Feb 03 07:28:47 crc kubenswrapper[4708]: I0203 07:28:47.629882 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5dd9656794-5cgwc" podUID="9492f917-fe6a-4076-a2f3-7d43ebee25e0" containerName="barbican-api" containerID="cri-o://fd0d88ae537cca31d39e625cb4b32d42cf4e73a387a668ea4137639e966bf489" gracePeriod=30 Feb 03 07:28:47 crc kubenswrapper[4708]: I0203 07:28:47.684467 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3aa4c5d7-28f2-4fa2-9430-4865754b335e","Type":"ContainerStarted","Data":"2cf24d631a6d0e8fd15fc3665ff572f69a128fdbd5933872e3f6d8a68b6a8745"} Feb 03 07:28:47 crc kubenswrapper[4708]: I0203 07:28:47.685716 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 03 07:28:47 crc kubenswrapper[4708]: I0203 07:28:47.693007 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"87d3a294-c4b6-4ddf-9f60-c6afede1752a","Type":"ContainerStarted","Data":"d8ae524c874cf48067c46d7f7808c30018fc04bf0e7b75fe04d1d8eaae3f43a4"} Feb 03 07:28:47 crc kubenswrapper[4708]: I0203 07:28:47.695063 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-fc5cbdf64-524m4" event={"ID":"5d2f1707-b960-46ce-b412-6a16f8cc63c9","Type":"ContainerStarted","Data":"b52dab6d7b406f0d72c04fbc3f85b910abdb79a2cf75e4dc633945e70777452c"} Feb 03 07:28:47 crc kubenswrapper[4708]: I0203 07:28:47.696142 4708 generic.go:334] "Generic (PLEG): container finished" podID="5853c69c-a64c-449e-ab86-1fcb400ddc60" containerID="666c235470d7c2c9ee98b31cd2eb1e9a24ca7befd94ced667adfe14633fefd25" exitCode=0 Feb 03 07:28:47 crc kubenswrapper[4708]: I0203 07:28:47.696194 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-db-create-xtlsr" event={"ID":"5853c69c-a64c-449e-ab86-1fcb400ddc60","Type":"ContainerDied","Data":"666c235470d7c2c9ee98b31cd2eb1e9a24ca7befd94ced667adfe14633fefd25"} Feb 03 07:28:47 crc kubenswrapper[4708]: I0203 07:28:47.706613 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-e3db-account-create-update-cm9n8" event={"ID":"9da48053-326c-45cb-bf1c-fb8890642e13","Type":"ContainerStarted","Data":"d05044f5e78f95c7b2ed6a411fd0306cbd5f73e1dc075598a9f384118a58bc8f"} Feb 03 07:28:47 crc kubenswrapper[4708]: I0203 07:28:47.706656 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-e3db-account-create-update-cm9n8" event={"ID":"9da48053-326c-45cb-bf1c-fb8890642e13","Type":"ContainerStarted","Data":"40afad37af355e48377638a0f7f622675915dcdc189929f313ab43e55a790950"} Feb 03 07:28:47 crc kubenswrapper[4708]: I0203 07:28:47.716874 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Feb 03 07:28:47 crc kubenswrapper[4708]: I0203 07:28:47.739901 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-conductor-0" 
event={"ID":"361821ae-c957-4e31-bb9b-6d659aaceec4","Type":"ContainerStarted","Data":"154b5f4cb253ed61421df266239ffc77647cde1f3c0e1883021f8a8ad429d851"} Feb 03 07:28:47 crc kubenswrapper[4708]: I0203 07:28:47.768517 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.863029963 podStartE2EDuration="6.768494345s" podCreationTimestamp="2026-02-03 07:28:41 +0000 UTC" firstStartedPulling="2026-02-03 07:28:41.959984805 +0000 UTC m=+1100.941931612" lastFinishedPulling="2026-02-03 07:28:46.865449197 +0000 UTC m=+1105.847395994" observedRunningTime="2026-02-03 07:28:47.706176645 +0000 UTC m=+1106.688123452" watchObservedRunningTime="2026-02-03 07:28:47.768494345 +0000 UTC m=+1106.750441142" Feb 03 07:28:47 crc kubenswrapper[4708]: I0203 07:28:47.768952 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.725445622 podStartE2EDuration="5.768947626s" podCreationTimestamp="2026-02-03 07:28:42 +0000 UTC" firstStartedPulling="2026-02-03 07:28:43.305944012 +0000 UTC m=+1102.287890809" lastFinishedPulling="2026-02-03 07:28:44.349446006 +0000 UTC m=+1103.331392813" observedRunningTime="2026-02-03 07:28:47.727195504 +0000 UTC m=+1106.709142311" watchObservedRunningTime="2026-02-03 07:28:47.768947626 +0000 UTC m=+1106.750894433" Feb 03 07:28:47 crc kubenswrapper[4708]: E0203 07:28:47.950867 4708 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9da48053_326c_45cb_bf1c_fb8890642e13.slice/crio-d05044f5e78f95c7b2ed6a411fd0306cbd5f73e1dc075598a9f384118a58bc8f.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9492f917_fe6a_4076_a2f3_7d43ebee25e0.slice/crio-conmon-42af5751558b8a73e0a6c6aab781465d1a6e689ae2d065a4641c8aef078c3d29.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9da48053_326c_45cb_bf1c_fb8890642e13.slice/crio-conmon-d05044f5e78f95c7b2ed6a411fd0306cbd5f73e1dc075598a9f384118a58bc8f.scope\": RecentStats: unable to find data in memory cache]" Feb 03 07:28:48 crc kubenswrapper[4708]: I0203 07:28:48.749936 4708 generic.go:334] "Generic (PLEG): container finished" podID="9da48053-326c-45cb-bf1c-fb8890642e13" containerID="d05044f5e78f95c7b2ed6a411fd0306cbd5f73e1dc075598a9f384118a58bc8f" exitCode=0 Feb 03 07:28:48 crc kubenswrapper[4708]: I0203 07:28:48.750087 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-e3db-account-create-update-cm9n8" event={"ID":"9da48053-326c-45cb-bf1c-fb8890642e13","Type":"ContainerDied","Data":"d05044f5e78f95c7b2ed6a411fd0306cbd5f73e1dc075598a9f384118a58bc8f"} Feb 03 07:28:48 crc kubenswrapper[4708]: I0203 07:28:48.752875 4708 generic.go:334] "Generic (PLEG): container finished" podID="361821ae-c957-4e31-bb9b-6d659aaceec4" containerID="47e16aa6d69cc32119da28aaa021060180d51fdced172efdde3bfd66e277d05a" exitCode=0 Feb 03 07:28:48 crc kubenswrapper[4708]: I0203 07:28:48.752950 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-conductor-0" event={"ID":"361821ae-c957-4e31-bb9b-6d659aaceec4","Type":"ContainerDied","Data":"47e16aa6d69cc32119da28aaa021060180d51fdced172efdde3bfd66e277d05a"} Feb 03 07:28:48 crc kubenswrapper[4708]: I0203 07:28:48.757211 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/cinder-api-0" event={"ID":"b3ffce46-abee-4152-a945-f60062643ac0","Type":"ContainerStarted","Data":"58d14444c4433add882bd3ba8df1dd98771ab7eef7ceacad3b5084fa8734a29d"} Feb 03 07:28:48 crc kubenswrapper[4708]: I0203 07:28:48.757344 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="b3ffce46-abee-4152-a945-f60062643ac0" containerName="cinder-api-log" containerID="cri-o://b96e730e2abb9e29a458888d0e5aa375cacdad60fe39f1bf169b0e8505019001" gracePeriod=30 Feb 03 07:28:48 crc kubenswrapper[4708]: I0203 07:28:48.757404 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Feb 03 07:28:48 crc kubenswrapper[4708]: I0203 07:28:48.757431 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="b3ffce46-abee-4152-a945-f60062643ac0" containerName="cinder-api" containerID="cri-o://58d14444c4433add882bd3ba8df1dd98771ab7eef7ceacad3b5084fa8734a29d" gracePeriod=30 Feb 03 07:28:48 crc kubenswrapper[4708]: I0203 07:28:48.772452 4708 generic.go:334] "Generic (PLEG): container finished" podID="9492f917-fe6a-4076-a2f3-7d43ebee25e0" containerID="42af5751558b8a73e0a6c6aab781465d1a6e689ae2d065a4641c8aef078c3d29" exitCode=143 Feb 03 07:28:48 crc kubenswrapper[4708]: I0203 07:28:48.773552 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5dd9656794-5cgwc" event={"ID":"9492f917-fe6a-4076-a2f3-7d43ebee25e0","Type":"ContainerDied","Data":"42af5751558b8a73e0a6c6aab781465d1a6e689ae2d065a4641c8aef078c3d29"} Feb 03 07:28:48 crc kubenswrapper[4708]: I0203 07:28:48.834294 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=6.834275647 podStartE2EDuration="6.834275647s" podCreationTimestamp="2026-02-03 07:28:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:28:48.820810755 +0000 UTC m=+1107.802757562" watchObservedRunningTime="2026-02-03 07:28:48.834275647 +0000 UTC m=+1107.816222454" Feb 03 07:28:48 crc kubenswrapper[4708]: I0203 07:28:48.914849 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ironic-5fffdc6c76-m5s5d"] Feb 03 07:28:48 crc kubenswrapper[4708]: I0203 07:28:48.921096 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:48 crc kubenswrapper[4708]: I0203 07:28:48.924696 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ironic-internal-svc" Feb 03 07:28:48 crc kubenswrapper[4708]: I0203 07:28:48.925925 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-5fffdc6c76-m5s5d"] Feb 03 07:28:48 crc kubenswrapper[4708]: I0203 07:28:48.932404 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ironic-public-svc" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.057266 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7212cfb-233f-4a09-ae76-fcfe61a4ed14-config-data\") pod \"ironic-5fffdc6c76-m5s5d\" (UID: \"e7212cfb-233f-4a09-ae76-fcfe61a4ed14\") " pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.057320 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fr58t\" (UniqueName: \"kubernetes.io/projected/e7212cfb-233f-4a09-ae76-fcfe61a4ed14-kube-api-access-fr58t\") pod \"ironic-5fffdc6c76-m5s5d\" (UID: \"e7212cfb-233f-4a09-ae76-fcfe61a4ed14\") " pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.057357 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7212cfb-233f-4a09-ae76-fcfe61a4ed14-public-tls-certs\") pod \"ironic-5fffdc6c76-m5s5d\" (UID: \"e7212cfb-233f-4a09-ae76-fcfe61a4ed14\") " pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.057389 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/e7212cfb-233f-4a09-ae76-fcfe61a4ed14-config-data-merged\") pod \"ironic-5fffdc6c76-m5s5d\" (UID: \"e7212cfb-233f-4a09-ae76-fcfe61a4ed14\") " pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.057410 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7212cfb-233f-4a09-ae76-fcfe61a4ed14-logs\") pod \"ironic-5fffdc6c76-m5s5d\" (UID: \"e7212cfb-233f-4a09-ae76-fcfe61a4ed14\") " pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.057469 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e7212cfb-233f-4a09-ae76-fcfe61a4ed14-config-data-custom\") pod \"ironic-5fffdc6c76-m5s5d\" (UID: \"e7212cfb-233f-4a09-ae76-fcfe61a4ed14\") " pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.057509 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7212cfb-233f-4a09-ae76-fcfe61a4ed14-combined-ca-bundle\") pod \"ironic-5fffdc6c76-m5s5d\" (UID: \"e7212cfb-233f-4a09-ae76-fcfe61a4ed14\") " pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.057543 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-podinfo\" (UniqueName: 
\"kubernetes.io/downward-api/e7212cfb-233f-4a09-ae76-fcfe61a4ed14-etc-podinfo\") pod \"ironic-5fffdc6c76-m5s5d\" (UID: \"e7212cfb-233f-4a09-ae76-fcfe61a4ed14\") " pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.057631 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7212cfb-233f-4a09-ae76-fcfe61a4ed14-internal-tls-certs\") pod \"ironic-5fffdc6c76-m5s5d\" (UID: \"e7212cfb-233f-4a09-ae76-fcfe61a4ed14\") " pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.057731 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7212cfb-233f-4a09-ae76-fcfe61a4ed14-scripts\") pod \"ironic-5fffdc6c76-m5s5d\" (UID: \"e7212cfb-233f-4a09-ae76-fcfe61a4ed14\") " pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.161762 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7212cfb-233f-4a09-ae76-fcfe61a4ed14-scripts\") pod \"ironic-5fffdc6c76-m5s5d\" (UID: \"e7212cfb-233f-4a09-ae76-fcfe61a4ed14\") " pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.161837 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7212cfb-233f-4a09-ae76-fcfe61a4ed14-config-data\") pod \"ironic-5fffdc6c76-m5s5d\" (UID: \"e7212cfb-233f-4a09-ae76-fcfe61a4ed14\") " pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.161863 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fr58t\" (UniqueName: \"kubernetes.io/projected/e7212cfb-233f-4a09-ae76-fcfe61a4ed14-kube-api-access-fr58t\") pod \"ironic-5fffdc6c76-m5s5d\" (UID: \"e7212cfb-233f-4a09-ae76-fcfe61a4ed14\") " pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.161885 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7212cfb-233f-4a09-ae76-fcfe61a4ed14-public-tls-certs\") pod \"ironic-5fffdc6c76-m5s5d\" (UID: \"e7212cfb-233f-4a09-ae76-fcfe61a4ed14\") " pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.161904 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/e7212cfb-233f-4a09-ae76-fcfe61a4ed14-config-data-merged\") pod \"ironic-5fffdc6c76-m5s5d\" (UID: \"e7212cfb-233f-4a09-ae76-fcfe61a4ed14\") " pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.161919 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7212cfb-233f-4a09-ae76-fcfe61a4ed14-logs\") pod \"ironic-5fffdc6c76-m5s5d\" (UID: \"e7212cfb-233f-4a09-ae76-fcfe61a4ed14\") " pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.161940 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e7212cfb-233f-4a09-ae76-fcfe61a4ed14-config-data-custom\") pod \"ironic-5fffdc6c76-m5s5d\" (UID: 
\"e7212cfb-233f-4a09-ae76-fcfe61a4ed14\") " pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.161965 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7212cfb-233f-4a09-ae76-fcfe61a4ed14-combined-ca-bundle\") pod \"ironic-5fffdc6c76-m5s5d\" (UID: \"e7212cfb-233f-4a09-ae76-fcfe61a4ed14\") " pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.161992 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/e7212cfb-233f-4a09-ae76-fcfe61a4ed14-etc-podinfo\") pod \"ironic-5fffdc6c76-m5s5d\" (UID: \"e7212cfb-233f-4a09-ae76-fcfe61a4ed14\") " pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.162016 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7212cfb-233f-4a09-ae76-fcfe61a4ed14-internal-tls-certs\") pod \"ironic-5fffdc6c76-m5s5d\" (UID: \"e7212cfb-233f-4a09-ae76-fcfe61a4ed14\") " pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.163537 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/e7212cfb-233f-4a09-ae76-fcfe61a4ed14-config-data-merged\") pod \"ironic-5fffdc6c76-m5s5d\" (UID: \"e7212cfb-233f-4a09-ae76-fcfe61a4ed14\") " pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.166924 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7212cfb-233f-4a09-ae76-fcfe61a4ed14-logs\") pod \"ironic-5fffdc6c76-m5s5d\" (UID: \"e7212cfb-233f-4a09-ae76-fcfe61a4ed14\") " pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.168205 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7212cfb-233f-4a09-ae76-fcfe61a4ed14-scripts\") pod \"ironic-5fffdc6c76-m5s5d\" (UID: \"e7212cfb-233f-4a09-ae76-fcfe61a4ed14\") " pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.171468 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7212cfb-233f-4a09-ae76-fcfe61a4ed14-internal-tls-certs\") pod \"ironic-5fffdc6c76-m5s5d\" (UID: \"e7212cfb-233f-4a09-ae76-fcfe61a4ed14\") " pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.176528 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/e7212cfb-233f-4a09-ae76-fcfe61a4ed14-etc-podinfo\") pod \"ironic-5fffdc6c76-m5s5d\" (UID: \"e7212cfb-233f-4a09-ae76-fcfe61a4ed14\") " pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.176685 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7212cfb-233f-4a09-ae76-fcfe61a4ed14-config-data\") pod \"ironic-5fffdc6c76-m5s5d\" (UID: \"e7212cfb-233f-4a09-ae76-fcfe61a4ed14\") " pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.177307 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7212cfb-233f-4a09-ae76-fcfe61a4ed14-combined-ca-bundle\") pod \"ironic-5fffdc6c76-m5s5d\" (UID: \"e7212cfb-233f-4a09-ae76-fcfe61a4ed14\") " pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.180661 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e7212cfb-233f-4a09-ae76-fcfe61a4ed14-config-data-custom\") pod \"ironic-5fffdc6c76-m5s5d\" (UID: \"e7212cfb-233f-4a09-ae76-fcfe61a4ed14\") " pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.185706 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7212cfb-233f-4a09-ae76-fcfe61a4ed14-public-tls-certs\") pod \"ironic-5fffdc6c76-m5s5d\" (UID: \"e7212cfb-233f-4a09-ae76-fcfe61a4ed14\") " pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.215687 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fr58t\" (UniqueName: \"kubernetes.io/projected/e7212cfb-233f-4a09-ae76-fcfe61a4ed14-kube-api-access-fr58t\") pod \"ironic-5fffdc6c76-m5s5d\" (UID: \"e7212cfb-233f-4a09-ae76-fcfe61a4ed14\") " pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.256807 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.744872 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-inspector-db-create-xtlsr" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.757610 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-inspector-e3db-account-create-update-cm9n8" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.840612 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-db-create-xtlsr" event={"ID":"5853c69c-a64c-449e-ab86-1fcb400ddc60","Type":"ContainerDied","Data":"fcb3ce1ddf9d5555947d4df50c956c30470f2c75105b284a5fd391f11da1d47e"} Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.841400 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fcb3ce1ddf9d5555947d4df50c956c30470f2c75105b284a5fd391f11da1d47e" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.841493 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-inspector-db-create-xtlsr" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.856215 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-inspector-e3db-account-create-update-cm9n8" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.856251 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-e3db-account-create-update-cm9n8" event={"ID":"9da48053-326c-45cb-bf1c-fb8890642e13","Type":"ContainerDied","Data":"40afad37af355e48377638a0f7f622675915dcdc189929f313ab43e55a790950"} Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.856312 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="40afad37af355e48377638a0f7f622675915dcdc189929f313ab43e55a790950" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.861247 4708 generic.go:334] "Generic (PLEG): container finished" podID="b3ffce46-abee-4152-a945-f60062643ac0" containerID="58d14444c4433add882bd3ba8df1dd98771ab7eef7ceacad3b5084fa8734a29d" exitCode=0 Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.861312 4708 generic.go:334] "Generic (PLEG): container finished" podID="b3ffce46-abee-4152-a945-f60062643ac0" containerID="b96e730e2abb9e29a458888d0e5aa375cacdad60fe39f1bf169b0e8505019001" exitCode=143 Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.861351 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b3ffce46-abee-4152-a945-f60062643ac0","Type":"ContainerDied","Data":"58d14444c4433add882bd3ba8df1dd98771ab7eef7ceacad3b5084fa8734a29d"} Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.861403 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b3ffce46-abee-4152-a945-f60062643ac0","Type":"ContainerDied","Data":"b96e730e2abb9e29a458888d0e5aa375cacdad60fe39f1bf169b0e8505019001"} Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.883291 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5853c69c-a64c-449e-ab86-1fcb400ddc60-operator-scripts\") pod \"5853c69c-a64c-449e-ab86-1fcb400ddc60\" (UID: \"5853c69c-a64c-449e-ab86-1fcb400ddc60\") " Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.883461 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9da48053-326c-45cb-bf1c-fb8890642e13-operator-scripts\") pod \"9da48053-326c-45cb-bf1c-fb8890642e13\" (UID: \"9da48053-326c-45cb-bf1c-fb8890642e13\") " Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.883577 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d8x7z\" (UniqueName: \"kubernetes.io/projected/9da48053-326c-45cb-bf1c-fb8890642e13-kube-api-access-d8x7z\") pod \"9da48053-326c-45cb-bf1c-fb8890642e13\" (UID: \"9da48053-326c-45cb-bf1c-fb8890642e13\") " Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.883685 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rpqrh\" (UniqueName: \"kubernetes.io/projected/5853c69c-a64c-449e-ab86-1fcb400ddc60-kube-api-access-rpqrh\") pod \"5853c69c-a64c-449e-ab86-1fcb400ddc60\" (UID: \"5853c69c-a64c-449e-ab86-1fcb400ddc60\") " Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.883912 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5853c69c-a64c-449e-ab86-1fcb400ddc60-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5853c69c-a64c-449e-ab86-1fcb400ddc60" (UID: "5853c69c-a64c-449e-ab86-1fcb400ddc60"). 
InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.884063 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9da48053-326c-45cb-bf1c-fb8890642e13-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9da48053-326c-45cb-bf1c-fb8890642e13" (UID: "9da48053-326c-45cb-bf1c-fb8890642e13"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.884373 4708 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5853c69c-a64c-449e-ab86-1fcb400ddc60-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.884386 4708 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9da48053-326c-45cb-bf1c-fb8890642e13-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.891077 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5853c69c-a64c-449e-ab86-1fcb400ddc60-kube-api-access-rpqrh" (OuterVolumeSpecName: "kube-api-access-rpqrh") pod "5853c69c-a64c-449e-ab86-1fcb400ddc60" (UID: "5853c69c-a64c-449e-ab86-1fcb400ddc60"). InnerVolumeSpecName "kube-api-access-rpqrh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:28:49 crc kubenswrapper[4708]: I0203 07:28:49.893273 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9da48053-326c-45cb-bf1c-fb8890642e13-kube-api-access-d8x7z" (OuterVolumeSpecName: "kube-api-access-d8x7z") pod "9da48053-326c-45cb-bf1c-fb8890642e13" (UID: "9da48053-326c-45cb-bf1c-fb8890642e13"). InnerVolumeSpecName "kube-api-access-d8x7z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:28:50 crc kubenswrapper[4708]: I0203 07:28:50.010428 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d8x7z\" (UniqueName: \"kubernetes.io/projected/9da48053-326c-45cb-bf1c-fb8890642e13-kube-api-access-d8x7z\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:50 crc kubenswrapper[4708]: I0203 07:28:50.010661 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rpqrh\" (UniqueName: \"kubernetes.io/projected/5853c69c-a64c-449e-ab86-1fcb400ddc60-kube-api-access-rpqrh\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:50 crc kubenswrapper[4708]: I0203 07:28:50.720064 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Feb 03 07:28:50 crc kubenswrapper[4708]: E0203 07:28:50.720676 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9da48053-326c-45cb-bf1c-fb8890642e13" containerName="mariadb-account-create-update" Feb 03 07:28:50 crc kubenswrapper[4708]: I0203 07:28:50.720693 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="9da48053-326c-45cb-bf1c-fb8890642e13" containerName="mariadb-account-create-update" Feb 03 07:28:50 crc kubenswrapper[4708]: E0203 07:28:50.720706 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5853c69c-a64c-449e-ab86-1fcb400ddc60" containerName="mariadb-database-create" Feb 03 07:28:50 crc kubenswrapper[4708]: I0203 07:28:50.720712 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="5853c69c-a64c-449e-ab86-1fcb400ddc60" containerName="mariadb-database-create" Feb 03 07:28:50 crc kubenswrapper[4708]: I0203 07:28:50.720910 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="5853c69c-a64c-449e-ab86-1fcb400ddc60" containerName="mariadb-database-create" Feb 03 07:28:50 crc kubenswrapper[4708]: I0203 07:28:50.720938 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="9da48053-326c-45cb-bf1c-fb8890642e13" containerName="mariadb-account-create-update" Feb 03 07:28:50 crc kubenswrapper[4708]: I0203 07:28:50.721480 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Feb 03 07:28:50 crc kubenswrapper[4708]: I0203 07:28:50.723321 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Feb 03 07:28:50 crc kubenswrapper[4708]: I0203 07:28:50.723612 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-9w7ql" Feb 03 07:28:50 crc kubenswrapper[4708]: I0203 07:28:50.724067 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Feb 03 07:28:50 crc kubenswrapper[4708]: I0203 07:28:50.729691 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Feb 03 07:28:50 crc kubenswrapper[4708]: I0203 07:28:50.826823 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/db624ad8-1c0f-4100-b3a2-4c80e02c1b03-openstack-config-secret\") pod \"openstackclient\" (UID: \"db624ad8-1c0f-4100-b3a2-4c80e02c1b03\") " pod="openstack/openstackclient" Feb 03 07:28:50 crc kubenswrapper[4708]: I0203 07:28:50.826970 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/db624ad8-1c0f-4100-b3a2-4c80e02c1b03-openstack-config\") pod \"openstackclient\" (UID: \"db624ad8-1c0f-4100-b3a2-4c80e02c1b03\") " pod="openstack/openstackclient" Feb 03 07:28:50 crc kubenswrapper[4708]: I0203 07:28:50.827042 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db624ad8-1c0f-4100-b3a2-4c80e02c1b03-combined-ca-bundle\") pod \"openstackclient\" (UID: \"db624ad8-1c0f-4100-b3a2-4c80e02c1b03\") " pod="openstack/openstackclient" Feb 03 07:28:50 crc kubenswrapper[4708]: I0203 07:28:50.827109 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9t44\" (UniqueName: \"kubernetes.io/projected/db624ad8-1c0f-4100-b3a2-4c80e02c1b03-kube-api-access-b9t44\") pod \"openstackclient\" (UID: \"db624ad8-1c0f-4100-b3a2-4c80e02c1b03\") " pod="openstack/openstackclient" Feb 03 07:28:50 crc kubenswrapper[4708]: I0203 07:28:50.930953 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/db624ad8-1c0f-4100-b3a2-4c80e02c1b03-openstack-config-secret\") pod \"openstackclient\" (UID: \"db624ad8-1c0f-4100-b3a2-4c80e02c1b03\") " pod="openstack/openstackclient" Feb 03 07:28:50 crc kubenswrapper[4708]: I0203 07:28:50.931024 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/db624ad8-1c0f-4100-b3a2-4c80e02c1b03-openstack-config\") pod \"openstackclient\" (UID: \"db624ad8-1c0f-4100-b3a2-4c80e02c1b03\") " pod="openstack/openstackclient" Feb 03 07:28:50 crc kubenswrapper[4708]: I0203 07:28:50.931056 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db624ad8-1c0f-4100-b3a2-4c80e02c1b03-combined-ca-bundle\") pod \"openstackclient\" (UID: \"db624ad8-1c0f-4100-b3a2-4c80e02c1b03\") " pod="openstack/openstackclient" Feb 03 07:28:50 crc kubenswrapper[4708]: I0203 07:28:50.931081 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-b9t44\" (UniqueName: \"kubernetes.io/projected/db624ad8-1c0f-4100-b3a2-4c80e02c1b03-kube-api-access-b9t44\") pod \"openstackclient\" (UID: \"db624ad8-1c0f-4100-b3a2-4c80e02c1b03\") " pod="openstack/openstackclient" Feb 03 07:28:50 crc kubenswrapper[4708]: I0203 07:28:50.932907 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/db624ad8-1c0f-4100-b3a2-4c80e02c1b03-openstack-config\") pod \"openstackclient\" (UID: \"db624ad8-1c0f-4100-b3a2-4c80e02c1b03\") " pod="openstack/openstackclient" Feb 03 07:28:50 crc kubenswrapper[4708]: I0203 07:28:50.940120 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/db624ad8-1c0f-4100-b3a2-4c80e02c1b03-openstack-config-secret\") pod \"openstackclient\" (UID: \"db624ad8-1c0f-4100-b3a2-4c80e02c1b03\") " pod="openstack/openstackclient" Feb 03 07:28:50 crc kubenswrapper[4708]: I0203 07:28:50.940291 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db624ad8-1c0f-4100-b3a2-4c80e02c1b03-combined-ca-bundle\") pod \"openstackclient\" (UID: \"db624ad8-1c0f-4100-b3a2-4c80e02c1b03\") " pod="openstack/openstackclient" Feb 03 07:28:50 crc kubenswrapper[4708]: I0203 07:28:50.946577 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b9t44\" (UniqueName: \"kubernetes.io/projected/db624ad8-1c0f-4100-b3a2-4c80e02c1b03-kube-api-access-b9t44\") pod \"openstackclient\" (UID: \"db624ad8-1c0f-4100-b3a2-4c80e02c1b03\") " pod="openstack/openstackclient" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.051833 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.122281 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.248119 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zk9gm\" (UniqueName: \"kubernetes.io/projected/b3ffce46-abee-4152-a945-f60062643ac0-kube-api-access-zk9gm\") pod \"b3ffce46-abee-4152-a945-f60062643ac0\" (UID: \"b3ffce46-abee-4152-a945-f60062643ac0\") " Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.248624 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b3ffce46-abee-4152-a945-f60062643ac0-etc-machine-id\") pod \"b3ffce46-abee-4152-a945-f60062643ac0\" (UID: \"b3ffce46-abee-4152-a945-f60062643ac0\") " Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.248653 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3ffce46-abee-4152-a945-f60062643ac0-logs\") pod \"b3ffce46-abee-4152-a945-f60062643ac0\" (UID: \"b3ffce46-abee-4152-a945-f60062643ac0\") " Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.248737 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b3ffce46-abee-4152-a945-f60062643ac0-config-data-custom\") pod \"b3ffce46-abee-4152-a945-f60062643ac0\" (UID: \"b3ffce46-abee-4152-a945-f60062643ac0\") " Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.248749 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3ffce46-abee-4152-a945-f60062643ac0-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "b3ffce46-abee-4152-a945-f60062643ac0" (UID: "b3ffce46-abee-4152-a945-f60062643ac0"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.248828 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3ffce46-abee-4152-a945-f60062643ac0-combined-ca-bundle\") pod \"b3ffce46-abee-4152-a945-f60062643ac0\" (UID: \"b3ffce46-abee-4152-a945-f60062643ac0\") " Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.248923 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3ffce46-abee-4152-a945-f60062643ac0-config-data\") pod \"b3ffce46-abee-4152-a945-f60062643ac0\" (UID: \"b3ffce46-abee-4152-a945-f60062643ac0\") " Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.248951 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3ffce46-abee-4152-a945-f60062643ac0-scripts\") pod \"b3ffce46-abee-4152-a945-f60062643ac0\" (UID: \"b3ffce46-abee-4152-a945-f60062643ac0\") " Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.249130 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3ffce46-abee-4152-a945-f60062643ac0-logs" (OuterVolumeSpecName: "logs") pod "b3ffce46-abee-4152-a945-f60062643ac0" (UID: "b3ffce46-abee-4152-a945-f60062643ac0"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.250184 4708 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b3ffce46-abee-4152-a945-f60062643ac0-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.250204 4708 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3ffce46-abee-4152-a945-f60062643ac0-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.254827 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3ffce46-abee-4152-a945-f60062643ac0-scripts" (OuterVolumeSpecName: "scripts") pod "b3ffce46-abee-4152-a945-f60062643ac0" (UID: "b3ffce46-abee-4152-a945-f60062643ac0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.256067 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3ffce46-abee-4152-a945-f60062643ac0-kube-api-access-zk9gm" (OuterVolumeSpecName: "kube-api-access-zk9gm") pod "b3ffce46-abee-4152-a945-f60062643ac0" (UID: "b3ffce46-abee-4152-a945-f60062643ac0"). InnerVolumeSpecName "kube-api-access-zk9gm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.269181 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3ffce46-abee-4152-a945-f60062643ac0-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b3ffce46-abee-4152-a945-f60062643ac0" (UID: "b3ffce46-abee-4152-a945-f60062643ac0"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.308933 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3ffce46-abee-4152-a945-f60062643ac0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b3ffce46-abee-4152-a945-f60062643ac0" (UID: "b3ffce46-abee-4152-a945-f60062643ac0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.341306 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3ffce46-abee-4152-a945-f60062643ac0-config-data" (OuterVolumeSpecName: "config-data") pod "b3ffce46-abee-4152-a945-f60062643ac0" (UID: "b3ffce46-abee-4152-a945-f60062643ac0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.353362 4708 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b3ffce46-abee-4152-a945-f60062643ac0-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.353400 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3ffce46-abee-4152-a945-f60062643ac0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.353413 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3ffce46-abee-4152-a945-f60062643ac0-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.353424 4708 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3ffce46-abee-4152-a945-f60062643ac0-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.353434 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zk9gm\" (UniqueName: \"kubernetes.io/projected/b3ffce46-abee-4152-a945-f60062643ac0-kube-api-access-zk9gm\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.443867 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5dd9656794-5cgwc" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.556667 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9492f917-fe6a-4076-a2f3-7d43ebee25e0-combined-ca-bundle\") pod \"9492f917-fe6a-4076-a2f3-7d43ebee25e0\" (UID: \"9492f917-fe6a-4076-a2f3-7d43ebee25e0\") " Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.556772 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h9dnn\" (UniqueName: \"kubernetes.io/projected/9492f917-fe6a-4076-a2f3-7d43ebee25e0-kube-api-access-h9dnn\") pod \"9492f917-fe6a-4076-a2f3-7d43ebee25e0\" (UID: \"9492f917-fe6a-4076-a2f3-7d43ebee25e0\") " Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.556846 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9492f917-fe6a-4076-a2f3-7d43ebee25e0-logs\") pod \"9492f917-fe6a-4076-a2f3-7d43ebee25e0\" (UID: \"9492f917-fe6a-4076-a2f3-7d43ebee25e0\") " Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.556880 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9492f917-fe6a-4076-a2f3-7d43ebee25e0-config-data\") pod \"9492f917-fe6a-4076-a2f3-7d43ebee25e0\" (UID: \"9492f917-fe6a-4076-a2f3-7d43ebee25e0\") " Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.556914 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9492f917-fe6a-4076-a2f3-7d43ebee25e0-config-data-custom\") pod \"9492f917-fe6a-4076-a2f3-7d43ebee25e0\" (UID: \"9492f917-fe6a-4076-a2f3-7d43ebee25e0\") " Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.558422 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9492f917-fe6a-4076-a2f3-7d43ebee25e0-logs" 
(OuterVolumeSpecName: "logs") pod "9492f917-fe6a-4076-a2f3-7d43ebee25e0" (UID: "9492f917-fe6a-4076-a2f3-7d43ebee25e0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.562843 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9492f917-fe6a-4076-a2f3-7d43ebee25e0-kube-api-access-h9dnn" (OuterVolumeSpecName: "kube-api-access-h9dnn") pod "9492f917-fe6a-4076-a2f3-7d43ebee25e0" (UID: "9492f917-fe6a-4076-a2f3-7d43ebee25e0"). InnerVolumeSpecName "kube-api-access-h9dnn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.567990 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9492f917-fe6a-4076-a2f3-7d43ebee25e0-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "9492f917-fe6a-4076-a2f3-7d43ebee25e0" (UID: "9492f917-fe6a-4076-a2f3-7d43ebee25e0"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.594294 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-5fffdc6c76-m5s5d"] Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.594424 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9492f917-fe6a-4076-a2f3-7d43ebee25e0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9492f917-fe6a-4076-a2f3-7d43ebee25e0" (UID: "9492f917-fe6a-4076-a2f3-7d43ebee25e0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.627285 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9492f917-fe6a-4076-a2f3-7d43ebee25e0-config-data" (OuterVolumeSpecName: "config-data") pod "9492f917-fe6a-4076-a2f3-7d43ebee25e0" (UID: "9492f917-fe6a-4076-a2f3-7d43ebee25e0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.660085 4708 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9492f917-fe6a-4076-a2f3-7d43ebee25e0-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.660121 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9492f917-fe6a-4076-a2f3-7d43ebee25e0-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.660130 4708 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9492f917-fe6a-4076-a2f3-7d43ebee25e0-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.660141 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9492f917-fe6a-4076-a2f3-7d43ebee25e0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.660152 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h9dnn\" (UniqueName: \"kubernetes.io/projected/9492f917-fe6a-4076-a2f3-7d43ebee25e0-kube-api-access-h9dnn\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.721449 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Feb 03 07:28:51 crc kubenswrapper[4708]: W0203 07:28:51.737066 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb624ad8_1c0f_4100_b3a2_4c80e02c1b03.slice/crio-0af1271e38444b822bc317a3bc9b62185680d45376a451c235c0d214351f2d1b WatchSource:0}: Error finding container 0af1271e38444b822bc317a3bc9b62185680d45376a451c235c0d214351f2d1b: Status 404 returned error can't find the container with id 0af1271e38444b822bc317a3bc9b62185680d45376a451c235c0d214351f2d1b Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.882760 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"db624ad8-1c0f-4100-b3a2-4c80e02c1b03","Type":"ContainerStarted","Data":"0af1271e38444b822bc317a3bc9b62185680d45376a451c235c0d214351f2d1b"} Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.889531 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-neutron-agent-95b7948fb-x2nkv" event={"ID":"aeb72dfd-3f7b-41fa-882f-3290c463fcbe","Type":"ContainerStarted","Data":"744e15cc631e7a66e10f42708d11d559e626c5fee30e0669f563bde5e0051254"} Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.889639 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ironic-neutron-agent-95b7948fb-x2nkv" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.893760 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-5fffdc6c76-m5s5d" event={"ID":"e7212cfb-233f-4a09-ae76-fcfe61a4ed14","Type":"ContainerStarted","Data":"e9ff5e7f9d327b541b627223ebeecadde2a844bf9115b4c17091a5054a69e463"} Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.893840 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-5fffdc6c76-m5s5d" event={"ID":"e7212cfb-233f-4a09-ae76-fcfe61a4ed14","Type":"ContainerStarted","Data":"e7b85f818cb1b09e6a53b1db6577ac3b7a42cf37395361ad9a11fc4d846e8e9b"} Feb 03 07:28:51 crc kubenswrapper[4708]: 
I0203 07:28:51.896597 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b3ffce46-abee-4152-a945-f60062643ac0","Type":"ContainerDied","Data":"64be77b5931ae2c84bdd136c5b80aaffa5339bd1c96ab175cccba834eb17ac81"} Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.896637 4708 scope.go:117] "RemoveContainer" containerID="58d14444c4433add882bd3ba8df1dd98771ab7eef7ceacad3b5084fa8734a29d" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.896755 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.902049 4708 generic.go:334] "Generic (PLEG): container finished" podID="9492f917-fe6a-4076-a2f3-7d43ebee25e0" containerID="fd0d88ae537cca31d39e625cb4b32d42cf4e73a387a668ea4137639e966bf489" exitCode=0 Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.902144 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5dd9656794-5cgwc" event={"ID":"9492f917-fe6a-4076-a2f3-7d43ebee25e0","Type":"ContainerDied","Data":"fd0d88ae537cca31d39e625cb4b32d42cf4e73a387a668ea4137639e966bf489"} Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.902160 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5dd9656794-5cgwc" event={"ID":"9492f917-fe6a-4076-a2f3-7d43ebee25e0","Type":"ContainerDied","Data":"6d21445b62859abebcb8f004314d9758e6343e098ca69100bbcfb759e205be5d"} Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.902205 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5dd9656794-5cgwc" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.912951 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-fc5cbdf64-524m4" event={"ID":"5d2f1707-b960-46ce-b412-6a16f8cc63c9","Type":"ContainerStarted","Data":"7112781cecec0ce36b5ad7ca5b033e477f7619ca067c4dc131cd12774f598f86"} Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.918597 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ironic-neutron-agent-95b7948fb-x2nkv" podStartSLOduration=2.813485642 podStartE2EDuration="7.918548608s" podCreationTimestamp="2026-02-03 07:28:44 +0000 UTC" firstStartedPulling="2026-02-03 07:28:45.902571319 +0000 UTC m=+1104.884518126" lastFinishedPulling="2026-02-03 07:28:51.007634285 +0000 UTC m=+1109.989581092" observedRunningTime="2026-02-03 07:28:51.913818972 +0000 UTC m=+1110.895765779" watchObservedRunningTime="2026-02-03 07:28:51.918548608 +0000 UTC m=+1110.900495415" Feb 03 07:28:51 crc kubenswrapper[4708]: I0203 07:28:51.950144 4708 scope.go:117] "RemoveContainer" containerID="b96e730e2abb9e29a458888d0e5aa375cacdad60fe39f1bf169b0e8505019001" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.003895 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5dd9656794-5cgwc"] Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.010857 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-5dd9656794-5cgwc"] Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.032067 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.046391 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.059868 4708 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/cinder-api-0"] Feb 03 07:28:52 crc kubenswrapper[4708]: E0203 07:28:52.060336 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3ffce46-abee-4152-a945-f60062643ac0" containerName="cinder-api" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.060353 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3ffce46-abee-4152-a945-f60062643ac0" containerName="cinder-api" Feb 03 07:28:52 crc kubenswrapper[4708]: E0203 07:28:52.060397 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9492f917-fe6a-4076-a2f3-7d43ebee25e0" containerName="barbican-api" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.060406 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="9492f917-fe6a-4076-a2f3-7d43ebee25e0" containerName="barbican-api" Feb 03 07:28:52 crc kubenswrapper[4708]: E0203 07:28:52.060420 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9492f917-fe6a-4076-a2f3-7d43ebee25e0" containerName="barbican-api-log" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.060426 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="9492f917-fe6a-4076-a2f3-7d43ebee25e0" containerName="barbican-api-log" Feb 03 07:28:52 crc kubenswrapper[4708]: E0203 07:28:52.060436 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3ffce46-abee-4152-a945-f60062643ac0" containerName="cinder-api-log" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.060442 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3ffce46-abee-4152-a945-f60062643ac0" containerName="cinder-api-log" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.060657 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3ffce46-abee-4152-a945-f60062643ac0" containerName="cinder-api-log" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.060676 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="9492f917-fe6a-4076-a2f3-7d43ebee25e0" containerName="barbican-api" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.060685 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="9492f917-fe6a-4076-a2f3-7d43ebee25e0" containerName="barbican-api-log" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.060695 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3ffce46-abee-4152-a945-f60062643ac0" containerName="cinder-api" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.073267 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.074643 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.084188 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.084463 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.084633 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.091404 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e8626bc3-c20f-47d2-b183-9d27e9ec814c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e8626bc3-c20f-47d2-b183-9d27e9ec814c\") " pod="openstack/cinder-api-0" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.091470 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e8626bc3-c20f-47d2-b183-9d27e9ec814c-logs\") pod \"cinder-api-0\" (UID: \"e8626bc3-c20f-47d2-b183-9d27e9ec814c\") " pod="openstack/cinder-api-0" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.091658 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8626bc3-c20f-47d2-b183-9d27e9ec814c-config-data\") pod \"cinder-api-0\" (UID: \"e8626bc3-c20f-47d2-b183-9d27e9ec814c\") " pod="openstack/cinder-api-0" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.091894 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e8626bc3-c20f-47d2-b183-9d27e9ec814c-scripts\") pod \"cinder-api-0\" (UID: \"e8626bc3-c20f-47d2-b183-9d27e9ec814c\") " pod="openstack/cinder-api-0" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.092004 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8626bc3-c20f-47d2-b183-9d27e9ec814c-public-tls-certs\") pod \"cinder-api-0\" (UID: \"e8626bc3-c20f-47d2-b183-9d27e9ec814c\") " pod="openstack/cinder-api-0" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.092071 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e8626bc3-c20f-47d2-b183-9d27e9ec814c-config-data-custom\") pod \"cinder-api-0\" (UID: \"e8626bc3-c20f-47d2-b183-9d27e9ec814c\") " pod="openstack/cinder-api-0" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.092243 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8626bc3-c20f-47d2-b183-9d27e9ec814c-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"e8626bc3-c20f-47d2-b183-9d27e9ec814c\") " pod="openstack/cinder-api-0" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.092339 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xng8w\" (UniqueName: 
\"kubernetes.io/projected/e8626bc3-c20f-47d2-b183-9d27e9ec814c-kube-api-access-xng8w\") pod \"cinder-api-0\" (UID: \"e8626bc3-c20f-47d2-b183-9d27e9ec814c\") " pod="openstack/cinder-api-0" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.092400 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8626bc3-c20f-47d2-b183-9d27e9ec814c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e8626bc3-c20f-47d2-b183-9d27e9ec814c\") " pod="openstack/cinder-api-0" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.119669 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9492f917-fe6a-4076-a2f3-7d43ebee25e0" path="/var/lib/kubelet/pods/9492f917-fe6a-4076-a2f3-7d43ebee25e0/volumes" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.120355 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3ffce46-abee-4152-a945-f60062643ac0" path="/var/lib/kubelet/pods/b3ffce46-abee-4152-a945-f60062643ac0/volumes" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.194449 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8626bc3-c20f-47d2-b183-9d27e9ec814c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e8626bc3-c20f-47d2-b183-9d27e9ec814c\") " pod="openstack/cinder-api-0" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.194498 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e8626bc3-c20f-47d2-b183-9d27e9ec814c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e8626bc3-c20f-47d2-b183-9d27e9ec814c\") " pod="openstack/cinder-api-0" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.194524 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e8626bc3-c20f-47d2-b183-9d27e9ec814c-logs\") pod \"cinder-api-0\" (UID: \"e8626bc3-c20f-47d2-b183-9d27e9ec814c\") " pod="openstack/cinder-api-0" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.194575 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8626bc3-c20f-47d2-b183-9d27e9ec814c-config-data\") pod \"cinder-api-0\" (UID: \"e8626bc3-c20f-47d2-b183-9d27e9ec814c\") " pod="openstack/cinder-api-0" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.194931 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e8626bc3-c20f-47d2-b183-9d27e9ec814c-logs\") pod \"cinder-api-0\" (UID: \"e8626bc3-c20f-47d2-b183-9d27e9ec814c\") " pod="openstack/cinder-api-0" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.195155 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e8626bc3-c20f-47d2-b183-9d27e9ec814c-scripts\") pod \"cinder-api-0\" (UID: \"e8626bc3-c20f-47d2-b183-9d27e9ec814c\") " pod="openstack/cinder-api-0" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.195222 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8626bc3-c20f-47d2-b183-9d27e9ec814c-public-tls-certs\") pod \"cinder-api-0\" (UID: \"e8626bc3-c20f-47d2-b183-9d27e9ec814c\") " pod="openstack/cinder-api-0" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 
07:28:52.195253 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e8626bc3-c20f-47d2-b183-9d27e9ec814c-config-data-custom\") pod \"cinder-api-0\" (UID: \"e8626bc3-c20f-47d2-b183-9d27e9ec814c\") " pod="openstack/cinder-api-0" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.195315 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8626bc3-c20f-47d2-b183-9d27e9ec814c-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"e8626bc3-c20f-47d2-b183-9d27e9ec814c\") " pod="openstack/cinder-api-0" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.195362 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xng8w\" (UniqueName: \"kubernetes.io/projected/e8626bc3-c20f-47d2-b183-9d27e9ec814c-kube-api-access-xng8w\") pod \"cinder-api-0\" (UID: \"e8626bc3-c20f-47d2-b183-9d27e9ec814c\") " pod="openstack/cinder-api-0" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.195162 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e8626bc3-c20f-47d2-b183-9d27e9ec814c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e8626bc3-c20f-47d2-b183-9d27e9ec814c\") " pod="openstack/cinder-api-0" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.204859 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8626bc3-c20f-47d2-b183-9d27e9ec814c-public-tls-certs\") pod \"cinder-api-0\" (UID: \"e8626bc3-c20f-47d2-b183-9d27e9ec814c\") " pod="openstack/cinder-api-0" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.204969 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e8626bc3-c20f-47d2-b183-9d27e9ec814c-config-data-custom\") pod \"cinder-api-0\" (UID: \"e8626bc3-c20f-47d2-b183-9d27e9ec814c\") " pod="openstack/cinder-api-0" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.205668 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8626bc3-c20f-47d2-b183-9d27e9ec814c-config-data\") pod \"cinder-api-0\" (UID: \"e8626bc3-c20f-47d2-b183-9d27e9ec814c\") " pod="openstack/cinder-api-0" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.211851 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xng8w\" (UniqueName: \"kubernetes.io/projected/e8626bc3-c20f-47d2-b183-9d27e9ec814c-kube-api-access-xng8w\") pod \"cinder-api-0\" (UID: \"e8626bc3-c20f-47d2-b183-9d27e9ec814c\") " pod="openstack/cinder-api-0" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.212353 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e8626bc3-c20f-47d2-b183-9d27e9ec814c-scripts\") pod \"cinder-api-0\" (UID: \"e8626bc3-c20f-47d2-b183-9d27e9ec814c\") " pod="openstack/cinder-api-0" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.214145 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8626bc3-c20f-47d2-b183-9d27e9ec814c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e8626bc3-c20f-47d2-b183-9d27e9ec814c\") " pod="openstack/cinder-api-0" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.217360 
4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8626bc3-c20f-47d2-b183-9d27e9ec814c-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"e8626bc3-c20f-47d2-b183-9d27e9ec814c\") " pod="openstack/cinder-api-0" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.333337 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-fb89f7cf6-7s2lp" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.418318 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.855010 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.937446 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-mmcjs"] Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.937749 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-85ff748b95-mmcjs" podUID="68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34" containerName="dnsmasq-dns" containerID="cri-o://6835d377390d09c23b80658fb15aeb1b1ea362aac4def772f32f47bbed819553" gracePeriod=10 Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.968252 4708 generic.go:334] "Generic (PLEG): container finished" podID="5d2f1707-b960-46ce-b412-6a16f8cc63c9" containerID="7112781cecec0ce36b5ad7ca5b033e477f7619ca067c4dc131cd12774f598f86" exitCode=0 Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.968323 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-fc5cbdf64-524m4" event={"ID":"5d2f1707-b960-46ce-b412-6a16f8cc63c9","Type":"ContainerDied","Data":"7112781cecec0ce36b5ad7ca5b033e477f7619ca067c4dc131cd12774f598f86"} Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.976146 4708 generic.go:334] "Generic (PLEG): container finished" podID="e7212cfb-233f-4a09-ae76-fcfe61a4ed14" containerID="e9ff5e7f9d327b541b627223ebeecadde2a844bf9115b4c17091a5054a69e463" exitCode=0 Feb 03 07:28:52 crc kubenswrapper[4708]: I0203 07:28:52.976311 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-5fffdc6c76-m5s5d" event={"ID":"e7212cfb-233f-4a09-ae76-fcfe61a4ed14","Type":"ContainerDied","Data":"e9ff5e7f9d327b541b627223ebeecadde2a844bf9115b4c17091a5054a69e463"} Feb 03 07:28:53 crc kubenswrapper[4708]: I0203 07:28:53.183475 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Feb 03 07:28:53 crc kubenswrapper[4708]: I0203 07:28:53.255905 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 03 07:28:53 crc kubenswrapper[4708]: I0203 07:28:53.988736 4708 generic.go:334] "Generic (PLEG): container finished" podID="68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34" containerID="6835d377390d09c23b80658fb15aeb1b1ea362aac4def772f32f47bbed819553" exitCode=0 Feb 03 07:28:53 crc kubenswrapper[4708]: I0203 07:28:53.989283 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="87d3a294-c4b6-4ddf-9f60-c6afede1752a" containerName="cinder-scheduler" containerID="cri-o://817a73249ce67147d44e8e552c0fbbff5e8ad933ce2a428e698f7a8c56a8114e" gracePeriod=30 Feb 03 07:28:53 crc kubenswrapper[4708]: I0203 07:28:53.988835 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-85ff748b95-mmcjs" event={"ID":"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34","Type":"ContainerDied","Data":"6835d377390d09c23b80658fb15aeb1b1ea362aac4def772f32f47bbed819553"} Feb 03 07:28:53 crc kubenswrapper[4708]: I0203 07:28:53.989648 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="87d3a294-c4b6-4ddf-9f60-c6afede1752a" containerName="probe" containerID="cri-o://d8ae524c874cf48067c46d7f7808c30018fc04bf0e7b75fe04d1d8eaae3f43a4" gracePeriod=30 Feb 03 07:28:54 crc kubenswrapper[4708]: I0203 07:28:54.354301 4708 scope.go:117] "RemoveContainer" containerID="fd0d88ae537cca31d39e625cb4b32d42cf4e73a387a668ea4137639e966bf489" Feb 03 07:28:54 crc kubenswrapper[4708]: I0203 07:28:54.416101 4708 scope.go:117] "RemoveContainer" containerID="42af5751558b8a73e0a6c6aab781465d1a6e689ae2d065a4641c8aef078c3d29" Feb 03 07:28:54 crc kubenswrapper[4708]: I0203 07:28:54.510468 4708 scope.go:117] "RemoveContainer" containerID="fd0d88ae537cca31d39e625cb4b32d42cf4e73a387a668ea4137639e966bf489" Feb 03 07:28:54 crc kubenswrapper[4708]: E0203 07:28:54.511214 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd0d88ae537cca31d39e625cb4b32d42cf4e73a387a668ea4137639e966bf489\": container with ID starting with fd0d88ae537cca31d39e625cb4b32d42cf4e73a387a668ea4137639e966bf489 not found: ID does not exist" containerID="fd0d88ae537cca31d39e625cb4b32d42cf4e73a387a668ea4137639e966bf489" Feb 03 07:28:54 crc kubenswrapper[4708]: I0203 07:28:54.511257 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd0d88ae537cca31d39e625cb4b32d42cf4e73a387a668ea4137639e966bf489"} err="failed to get container status \"fd0d88ae537cca31d39e625cb4b32d42cf4e73a387a668ea4137639e966bf489\": rpc error: code = NotFound desc = could not find container \"fd0d88ae537cca31d39e625cb4b32d42cf4e73a387a668ea4137639e966bf489\": container with ID starting with fd0d88ae537cca31d39e625cb4b32d42cf4e73a387a668ea4137639e966bf489 not found: ID does not exist" Feb 03 07:28:54 crc kubenswrapper[4708]: I0203 07:28:54.511283 4708 scope.go:117] "RemoveContainer" containerID="42af5751558b8a73e0a6c6aab781465d1a6e689ae2d065a4641c8aef078c3d29" Feb 03 07:28:54 crc kubenswrapper[4708]: E0203 07:28:54.514564 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"42af5751558b8a73e0a6c6aab781465d1a6e689ae2d065a4641c8aef078c3d29\": container with ID starting with 42af5751558b8a73e0a6c6aab781465d1a6e689ae2d065a4641c8aef078c3d29 not found: ID does not exist" containerID="42af5751558b8a73e0a6c6aab781465d1a6e689ae2d065a4641c8aef078c3d29" Feb 03 07:28:54 crc kubenswrapper[4708]: I0203 07:28:54.514616 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42af5751558b8a73e0a6c6aab781465d1a6e689ae2d065a4641c8aef078c3d29"} err="failed to get container status \"42af5751558b8a73e0a6c6aab781465d1a6e689ae2d065a4641c8aef078c3d29\": rpc error: code = NotFound desc = could not find container \"42af5751558b8a73e0a6c6aab781465d1a6e689ae2d065a4641c8aef078c3d29\": container with ID starting with 42af5751558b8a73e0a6c6aab781465d1a6e689ae2d065a4641c8aef078c3d29 not found: ID does not exist" Feb 03 07:28:54 crc kubenswrapper[4708]: I0203 07:28:54.698703 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-mmcjs" Feb 03 07:28:54 crc kubenswrapper[4708]: I0203 07:28:54.766780 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s746k\" (UniqueName: \"kubernetes.io/projected/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-kube-api-access-s746k\") pod \"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34\" (UID: \"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34\") " Feb 03 07:28:54 crc kubenswrapper[4708]: I0203 07:28:54.767200 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-ovsdbserver-nb\") pod \"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34\" (UID: \"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34\") " Feb 03 07:28:54 crc kubenswrapper[4708]: I0203 07:28:54.767259 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-config\") pod \"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34\" (UID: \"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34\") " Feb 03 07:28:54 crc kubenswrapper[4708]: I0203 07:28:54.767284 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-ovsdbserver-sb\") pod \"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34\" (UID: \"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34\") " Feb 03 07:28:54 crc kubenswrapper[4708]: I0203 07:28:54.767310 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-dns-svc\") pod \"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34\" (UID: \"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34\") " Feb 03 07:28:54 crc kubenswrapper[4708]: I0203 07:28:54.767375 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-dns-swift-storage-0\") pod \"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34\" (UID: \"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34\") " Feb 03 07:28:54 crc kubenswrapper[4708]: I0203 07:28:54.777747 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-kube-api-access-s746k" (OuterVolumeSpecName: "kube-api-access-s746k") pod "68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34" (UID: "68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34"). InnerVolumeSpecName "kube-api-access-s746k". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:28:54 crc kubenswrapper[4708]: I0203 07:28:54.833822 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34" (UID: "68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:28:54 crc kubenswrapper[4708]: I0203 07:28:54.843448 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34" (UID: "68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:28:54 crc kubenswrapper[4708]: I0203 07:28:54.847444 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34" (UID: "68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:28:54 crc kubenswrapper[4708]: I0203 07:28:54.850107 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34" (UID: "68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:28:54 crc kubenswrapper[4708]: I0203 07:28:54.857396 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-config" (OuterVolumeSpecName: "config") pod "68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34" (UID: "68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:28:54 crc kubenswrapper[4708]: I0203 07:28:54.869301 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s746k\" (UniqueName: \"kubernetes.io/projected/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-kube-api-access-s746k\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:54 crc kubenswrapper[4708]: I0203 07:28:54.869343 4708 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:54 crc kubenswrapper[4708]: I0203 07:28:54.869375 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:54 crc kubenswrapper[4708]: I0203 07:28:54.869386 4708 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:54 crc kubenswrapper[4708]: I0203 07:28:54.869395 4708 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:54 crc kubenswrapper[4708]: I0203 07:28:54.869402 4708 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:54 crc kubenswrapper[4708]: E0203 07:28:54.974221 4708 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 744e15cc631e7a66e10f42708d11d559e626c5fee30e0669f563bde5e0051254 is running failed: container process not found" containerID="744e15cc631e7a66e10f42708d11d559e626c5fee30e0669f563bde5e0051254" cmd=["/bin/true"] Feb 03 07:28:54 crc kubenswrapper[4708]: E0203 07:28:54.974316 4708 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = 
NotFound desc = container is not created or running: checking if PID of 744e15cc631e7a66e10f42708d11d559e626c5fee30e0669f563bde5e0051254 is running failed: container process not found" containerID="744e15cc631e7a66e10f42708d11d559e626c5fee30e0669f563bde5e0051254" cmd=["/bin/true"] Feb 03 07:28:54 crc kubenswrapper[4708]: E0203 07:28:54.975008 4708 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 744e15cc631e7a66e10f42708d11d559e626c5fee30e0669f563bde5e0051254 is running failed: container process not found" containerID="744e15cc631e7a66e10f42708d11d559e626c5fee30e0669f563bde5e0051254" cmd=["/bin/true"] Feb 03 07:28:54 crc kubenswrapper[4708]: E0203 07:28:54.975073 4708 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 744e15cc631e7a66e10f42708d11d559e626c5fee30e0669f563bde5e0051254 is running failed: container process not found" containerID="744e15cc631e7a66e10f42708d11d559e626c5fee30e0669f563bde5e0051254" cmd=["/bin/true"] Feb 03 07:28:54 crc kubenswrapper[4708]: E0203 07:28:54.975314 4708 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 744e15cc631e7a66e10f42708d11d559e626c5fee30e0669f563bde5e0051254 is running failed: container process not found" containerID="744e15cc631e7a66e10f42708d11d559e626c5fee30e0669f563bde5e0051254" cmd=["/bin/true"] Feb 03 07:28:54 crc kubenswrapper[4708]: E0203 07:28:54.975356 4708 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 744e15cc631e7a66e10f42708d11d559e626c5fee30e0669f563bde5e0051254 is running failed: container process not found" containerID="744e15cc631e7a66e10f42708d11d559e626c5fee30e0669f563bde5e0051254" cmd=["/bin/true"] Feb 03 07:28:54 crc kubenswrapper[4708]: E0203 07:28:54.975385 4708 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 744e15cc631e7a66e10f42708d11d559e626c5fee30e0669f563bde5e0051254 is running failed: container process not found" probeType="Liveness" pod="openstack/ironic-neutron-agent-95b7948fb-x2nkv" podUID="aeb72dfd-3f7b-41fa-882f-3290c463fcbe" containerName="ironic-neutron-agent" Feb 03 07:28:54 crc kubenswrapper[4708]: E0203 07:28:54.975407 4708 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 744e15cc631e7a66e10f42708d11d559e626c5fee30e0669f563bde5e0051254 is running failed: container process not found" probeType="Readiness" pod="openstack/ironic-neutron-agent-95b7948fb-x2nkv" podUID="aeb72dfd-3f7b-41fa-882f-3290c463fcbe" containerName="ironic-neutron-agent" Feb 03 07:28:55 crc kubenswrapper[4708]: I0203 07:28:55.010869 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-5fffdc6c76-m5s5d" event={"ID":"e7212cfb-233f-4a09-ae76-fcfe61a4ed14","Type":"ContainerStarted","Data":"35f2478d73ec6aa822ed68efd4464600d9534056e44c44bf0197b57863c54026"} Feb 03 07:28:55 crc kubenswrapper[4708]: I0203 07:28:55.013576 4708 generic.go:334] "Generic (PLEG): container finished" podID="aeb72dfd-3f7b-41fa-882f-3290c463fcbe" containerID="744e15cc631e7a66e10f42708d11d559e626c5fee30e0669f563bde5e0051254" exitCode=1 Feb 03 07:28:55 crc kubenswrapper[4708]: I0203 07:28:55.013710 4708 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-neutron-agent-95b7948fb-x2nkv" event={"ID":"aeb72dfd-3f7b-41fa-882f-3290c463fcbe","Type":"ContainerDied","Data":"744e15cc631e7a66e10f42708d11d559e626c5fee30e0669f563bde5e0051254"} Feb 03 07:28:55 crc kubenswrapper[4708]: I0203 07:28:55.014702 4708 scope.go:117] "RemoveContainer" containerID="744e15cc631e7a66e10f42708d11d559e626c5fee30e0669f563bde5e0051254" Feb 03 07:28:55 crc kubenswrapper[4708]: I0203 07:28:55.034367 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-mmcjs" Feb 03 07:28:55 crc kubenswrapper[4708]: I0203 07:28:55.034361 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-mmcjs" event={"ID":"68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34","Type":"ContainerDied","Data":"354304992c1ed1d5027970f7492bd85ba24c3c488fb1bfa0c2be5b2cb276a35f"} Feb 03 07:28:55 crc kubenswrapper[4708]: I0203 07:28:55.034501 4708 scope.go:117] "RemoveContainer" containerID="6835d377390d09c23b80658fb15aeb1b1ea362aac4def772f32f47bbed819553" Feb 03 07:28:55 crc kubenswrapper[4708]: I0203 07:28:55.039423 4708 generic.go:334] "Generic (PLEG): container finished" podID="87d3a294-c4b6-4ddf-9f60-c6afede1752a" containerID="d8ae524c874cf48067c46d7f7808c30018fc04bf0e7b75fe04d1d8eaae3f43a4" exitCode=0 Feb 03 07:28:55 crc kubenswrapper[4708]: I0203 07:28:55.039466 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"87d3a294-c4b6-4ddf-9f60-c6afede1752a","Type":"ContainerDied","Data":"d8ae524c874cf48067c46d7f7808c30018fc04bf0e7b75fe04d1d8eaae3f43a4"} Feb 03 07:28:55 crc kubenswrapper[4708]: I0203 07:28:55.040688 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 03 07:28:55 crc kubenswrapper[4708]: I0203 07:28:55.042864 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-fc5cbdf64-524m4" event={"ID":"5d2f1707-b960-46ce-b412-6a16f8cc63c9","Type":"ContainerStarted","Data":"30454f427b56f4c4f9380d560f1135202646353aa4998b0560eb942399e78727"} Feb 03 07:28:55 crc kubenswrapper[4708]: I0203 07:28:55.043105 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ironic-fc5cbdf64-524m4" Feb 03 07:28:55 crc kubenswrapper[4708]: I0203 07:28:55.067501 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ironic-fc5cbdf64-524m4" podStartSLOduration=6.66319494 podStartE2EDuration="11.067480099s" podCreationTimestamp="2026-02-03 07:28:44 +0000 UTC" firstStartedPulling="2026-02-03 07:28:46.617900666 +0000 UTC m=+1105.599847473" lastFinishedPulling="2026-02-03 07:28:51.022185825 +0000 UTC m=+1110.004132632" observedRunningTime="2026-02-03 07:28:55.064057434 +0000 UTC m=+1114.046004241" watchObservedRunningTime="2026-02-03 07:28:55.067480099 +0000 UTC m=+1114.049426906" Feb 03 07:28:55 crc kubenswrapper[4708]: I0203 07:28:55.088557 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-mmcjs"] Feb 03 07:28:55 crc kubenswrapper[4708]: I0203 07:28:55.093701 4708 scope.go:117] "RemoveContainer" containerID="353916dd6c4b2f21a33049d16e0ce9a9fadfe67d91467b16f8be48b300620b90" Feb 03 07:28:55 crc kubenswrapper[4708]: I0203 07:28:55.097320 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-mmcjs"] Feb 03 07:28:55 crc kubenswrapper[4708]: I0203 07:28:55.422286 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack/neutron-59974d4f4f-tvqgn" Feb 03 07:28:55 crc kubenswrapper[4708]: I0203 07:28:55.496731 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-fb89f7cf6-7s2lp"] Feb 03 07:28:55 crc kubenswrapper[4708]: I0203 07:28:55.496993 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-fb89f7cf6-7s2lp" podUID="ef60526c-751e-464d-a1c1-a50e343093b7" containerName="neutron-api" containerID="cri-o://b7fd16193d5b8bbe074e4c28feac1cdf29d65aa2feadce3b8badb1eb8df6afa4" gracePeriod=30 Feb 03 07:28:55 crc kubenswrapper[4708]: I0203 07:28:55.497090 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-fb89f7cf6-7s2lp" podUID="ef60526c-751e-464d-a1c1-a50e343093b7" containerName="neutron-httpd" containerID="cri-o://ce804deb4f437e893bdf37dfd4abb5c3b9ed636431ed5eb6463046f333a08aca" gracePeriod=30 Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.057372 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-neutron-agent-95b7948fb-x2nkv" event={"ID":"aeb72dfd-3f7b-41fa-882f-3290c463fcbe","Type":"ContainerStarted","Data":"8398b5ba17e634616cc84e36f75f9c35514c7e026577d62b4c96af19462ff71a"} Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.058737 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ironic-neutron-agent-95b7948fb-x2nkv" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.062646 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-5fffdc6c76-m5s5d" event={"ID":"e7212cfb-233f-4a09-ae76-fcfe61a4ed14","Type":"ContainerStarted","Data":"7c405b1e2fcd5251d21866a2b23a8e4cb10f0a48e05bcfabec3431a4238718aa"} Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.063250 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.064692 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e8626bc3-c20f-47d2-b183-9d27e9ec814c","Type":"ContainerStarted","Data":"bf5ed9d8f01f0bd6f90eedc8cf64c98ada4b1a279da1644f609c97ba010ea40c"} Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.064721 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e8626bc3-c20f-47d2-b183-9d27e9ec814c","Type":"ContainerStarted","Data":"b999adc3face778b9bd15dfeb21a665954f97cbef98a2c7e9bf30f658c13e6e9"} Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.077408 4708 generic.go:334] "Generic (PLEG): container finished" podID="ef60526c-751e-464d-a1c1-a50e343093b7" containerID="ce804deb4f437e893bdf37dfd4abb5c3b9ed636431ed5eb6463046f333a08aca" exitCode=0 Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.077476 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-fb89f7cf6-7s2lp" event={"ID":"ef60526c-751e-464d-a1c1-a50e343093b7","Type":"ContainerDied","Data":"ce804deb4f437e893bdf37dfd4abb5c3b9ed636431ed5eb6463046f333a08aca"} Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.101170 4708 generic.go:334] "Generic (PLEG): container finished" podID="5d2f1707-b960-46ce-b412-6a16f8cc63c9" containerID="9d805bac8f679e32aa699786bb1af0ae5fe6e7a380948ad5525982c8df9b5351" exitCode=1 Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.102200 4708 scope.go:117] "RemoveContainer" containerID="9d805bac8f679e32aa699786bb1af0ae5fe6e7a380948ad5525982c8df9b5351" Feb 03 07:28:56 crc kubenswrapper[4708]: 
I0203 07:28:56.111387 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34" path="/var/lib/kubelet/pods/68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34/volumes" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.112547 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-fc5cbdf64-524m4" event={"ID":"5d2f1707-b960-46ce-b412-6a16f8cc63c9","Type":"ContainerDied","Data":"9d805bac8f679e32aa699786bb1af0ae5fe6e7a380948ad5525982c8df9b5351"} Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.114956 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ironic-5fffdc6c76-m5s5d" podStartSLOduration=8.114938798 podStartE2EDuration="8.114938798s" podCreationTimestamp="2026-02-03 07:28:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:28:56.101493796 +0000 UTC m=+1115.083440613" watchObservedRunningTime="2026-02-03 07:28:56.114938798 +0000 UTC m=+1115.096885605" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.528814 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-85b58fb76c-jldbq"] Feb 03 07:28:56 crc kubenswrapper[4708]: E0203 07:28:56.529480 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34" containerName="dnsmasq-dns" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.529500 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34" containerName="dnsmasq-dns" Feb 03 07:28:56 crc kubenswrapper[4708]: E0203 07:28:56.529511 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34" containerName="init" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.529519 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34" containerName="init" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.529669 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="68c2cee4-5f7b-4d1b-b47c-29e20c5a2d34" containerName="dnsmasq-dns" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.530677 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-85b58fb76c-jldbq" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.536118 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.536524 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.544074 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.567200 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-85b58fb76c-jldbq"] Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.713548 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e6d6f02-2176-4c8f-93c4-cb78832fc2d3-run-httpd\") pod \"swift-proxy-85b58fb76c-jldbq\" (UID: \"7e6d6f02-2176-4c8f-93c4-cb78832fc2d3\") " pod="openstack/swift-proxy-85b58fb76c-jldbq" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.713598 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e6d6f02-2176-4c8f-93c4-cb78832fc2d3-log-httpd\") pod \"swift-proxy-85b58fb76c-jldbq\" (UID: \"7e6d6f02-2176-4c8f-93c4-cb78832fc2d3\") " pod="openstack/swift-proxy-85b58fb76c-jldbq" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.713624 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e6d6f02-2176-4c8f-93c4-cb78832fc2d3-internal-tls-certs\") pod \"swift-proxy-85b58fb76c-jldbq\" (UID: \"7e6d6f02-2176-4c8f-93c4-cb78832fc2d3\") " pod="openstack/swift-proxy-85b58fb76c-jldbq" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.713684 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e6d6f02-2176-4c8f-93c4-cb78832fc2d3-combined-ca-bundle\") pod \"swift-proxy-85b58fb76c-jldbq\" (UID: \"7e6d6f02-2176-4c8f-93c4-cb78832fc2d3\") " pod="openstack/swift-proxy-85b58fb76c-jldbq" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.713716 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dcq9c\" (UniqueName: \"kubernetes.io/projected/7e6d6f02-2176-4c8f-93c4-cb78832fc2d3-kube-api-access-dcq9c\") pod \"swift-proxy-85b58fb76c-jldbq\" (UID: \"7e6d6f02-2176-4c8f-93c4-cb78832fc2d3\") " pod="openstack/swift-proxy-85b58fb76c-jldbq" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.713741 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e6d6f02-2176-4c8f-93c4-cb78832fc2d3-public-tls-certs\") pod \"swift-proxy-85b58fb76c-jldbq\" (UID: \"7e6d6f02-2176-4c8f-93c4-cb78832fc2d3\") " pod="openstack/swift-proxy-85b58fb76c-jldbq" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.713768 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7e6d6f02-2176-4c8f-93c4-cb78832fc2d3-etc-swift\") pod \"swift-proxy-85b58fb76c-jldbq\" (UID: \"7e6d6f02-2176-4c8f-93c4-cb78832fc2d3\") " 
pod="openstack/swift-proxy-85b58fb76c-jldbq" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.713811 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e6d6f02-2176-4c8f-93c4-cb78832fc2d3-config-data\") pod \"swift-proxy-85b58fb76c-jldbq\" (UID: \"7e6d6f02-2176-4c8f-93c4-cb78832fc2d3\") " pod="openstack/swift-proxy-85b58fb76c-jldbq" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.815320 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e6d6f02-2176-4c8f-93c4-cb78832fc2d3-combined-ca-bundle\") pod \"swift-proxy-85b58fb76c-jldbq\" (UID: \"7e6d6f02-2176-4c8f-93c4-cb78832fc2d3\") " pod="openstack/swift-proxy-85b58fb76c-jldbq" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.815384 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dcq9c\" (UniqueName: \"kubernetes.io/projected/7e6d6f02-2176-4c8f-93c4-cb78832fc2d3-kube-api-access-dcq9c\") pod \"swift-proxy-85b58fb76c-jldbq\" (UID: \"7e6d6f02-2176-4c8f-93c4-cb78832fc2d3\") " pod="openstack/swift-proxy-85b58fb76c-jldbq" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.815413 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e6d6f02-2176-4c8f-93c4-cb78832fc2d3-public-tls-certs\") pod \"swift-proxy-85b58fb76c-jldbq\" (UID: \"7e6d6f02-2176-4c8f-93c4-cb78832fc2d3\") " pod="openstack/swift-proxy-85b58fb76c-jldbq" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.815445 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7e6d6f02-2176-4c8f-93c4-cb78832fc2d3-etc-swift\") pod \"swift-proxy-85b58fb76c-jldbq\" (UID: \"7e6d6f02-2176-4c8f-93c4-cb78832fc2d3\") " pod="openstack/swift-proxy-85b58fb76c-jldbq" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.815472 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e6d6f02-2176-4c8f-93c4-cb78832fc2d3-config-data\") pod \"swift-proxy-85b58fb76c-jldbq\" (UID: \"7e6d6f02-2176-4c8f-93c4-cb78832fc2d3\") " pod="openstack/swift-proxy-85b58fb76c-jldbq" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.815525 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e6d6f02-2176-4c8f-93c4-cb78832fc2d3-run-httpd\") pod \"swift-proxy-85b58fb76c-jldbq\" (UID: \"7e6d6f02-2176-4c8f-93c4-cb78832fc2d3\") " pod="openstack/swift-proxy-85b58fb76c-jldbq" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.815547 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e6d6f02-2176-4c8f-93c4-cb78832fc2d3-log-httpd\") pod \"swift-proxy-85b58fb76c-jldbq\" (UID: \"7e6d6f02-2176-4c8f-93c4-cb78832fc2d3\") " pod="openstack/swift-proxy-85b58fb76c-jldbq" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.815565 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e6d6f02-2176-4c8f-93c4-cb78832fc2d3-internal-tls-certs\") pod \"swift-proxy-85b58fb76c-jldbq\" (UID: \"7e6d6f02-2176-4c8f-93c4-cb78832fc2d3\") " pod="openstack/swift-proxy-85b58fb76c-jldbq" Feb 03 
07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.816616 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e6d6f02-2176-4c8f-93c4-cb78832fc2d3-run-httpd\") pod \"swift-proxy-85b58fb76c-jldbq\" (UID: \"7e6d6f02-2176-4c8f-93c4-cb78832fc2d3\") " pod="openstack/swift-proxy-85b58fb76c-jldbq" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.816821 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e6d6f02-2176-4c8f-93c4-cb78832fc2d3-log-httpd\") pod \"swift-proxy-85b58fb76c-jldbq\" (UID: \"7e6d6f02-2176-4c8f-93c4-cb78832fc2d3\") " pod="openstack/swift-proxy-85b58fb76c-jldbq" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.822544 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e6d6f02-2176-4c8f-93c4-cb78832fc2d3-internal-tls-certs\") pod \"swift-proxy-85b58fb76c-jldbq\" (UID: \"7e6d6f02-2176-4c8f-93c4-cb78832fc2d3\") " pod="openstack/swift-proxy-85b58fb76c-jldbq" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.823869 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e6d6f02-2176-4c8f-93c4-cb78832fc2d3-combined-ca-bundle\") pod \"swift-proxy-85b58fb76c-jldbq\" (UID: \"7e6d6f02-2176-4c8f-93c4-cb78832fc2d3\") " pod="openstack/swift-proxy-85b58fb76c-jldbq" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.824820 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e6d6f02-2176-4c8f-93c4-cb78832fc2d3-public-tls-certs\") pod \"swift-proxy-85b58fb76c-jldbq\" (UID: \"7e6d6f02-2176-4c8f-93c4-cb78832fc2d3\") " pod="openstack/swift-proxy-85b58fb76c-jldbq" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.830440 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e6d6f02-2176-4c8f-93c4-cb78832fc2d3-config-data\") pod \"swift-proxy-85b58fb76c-jldbq\" (UID: \"7e6d6f02-2176-4c8f-93c4-cb78832fc2d3\") " pod="openstack/swift-proxy-85b58fb76c-jldbq" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.838785 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7e6d6f02-2176-4c8f-93c4-cb78832fc2d3-etc-swift\") pod \"swift-proxy-85b58fb76c-jldbq\" (UID: \"7e6d6f02-2176-4c8f-93c4-cb78832fc2d3\") " pod="openstack/swift-proxy-85b58fb76c-jldbq" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.839167 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dcq9c\" (UniqueName: \"kubernetes.io/projected/7e6d6f02-2176-4c8f-93c4-cb78832fc2d3-kube-api-access-dcq9c\") pod \"swift-proxy-85b58fb76c-jldbq\" (UID: \"7e6d6f02-2176-4c8f-93c4-cb78832fc2d3\") " pod="openstack/swift-proxy-85b58fb76c-jldbq" Feb 03 07:28:56 crc kubenswrapper[4708]: I0203 07:28:56.865207 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-85b58fb76c-jldbq" Feb 03 07:28:57 crc kubenswrapper[4708]: I0203 07:28:57.125104 4708 generic.go:334] "Generic (PLEG): container finished" podID="5d2f1707-b960-46ce-b412-6a16f8cc63c9" containerID="413092028f6bbc984f610a50fb38670b9647f6df58504ac8acb890d90c2a1f1a" exitCode=1 Feb 03 07:28:57 crc kubenswrapper[4708]: I0203 07:28:57.125483 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-fc5cbdf64-524m4" event={"ID":"5d2f1707-b960-46ce-b412-6a16f8cc63c9","Type":"ContainerDied","Data":"413092028f6bbc984f610a50fb38670b9647f6df58504ac8acb890d90c2a1f1a"} Feb 03 07:28:57 crc kubenswrapper[4708]: I0203 07:28:57.125517 4708 scope.go:117] "RemoveContainer" containerID="9d805bac8f679e32aa699786bb1af0ae5fe6e7a380948ad5525982c8df9b5351" Feb 03 07:28:57 crc kubenswrapper[4708]: I0203 07:28:57.125692 4708 scope.go:117] "RemoveContainer" containerID="413092028f6bbc984f610a50fb38670b9647f6df58504ac8acb890d90c2a1f1a" Feb 03 07:28:57 crc kubenswrapper[4708]: E0203 07:28:57.126024 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ironic-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ironic-api pod=ironic-fc5cbdf64-524m4_openstack(5d2f1707-b960-46ce-b412-6a16f8cc63c9)\"" pod="openstack/ironic-fc5cbdf64-524m4" podUID="5d2f1707-b960-46ce-b412-6a16f8cc63c9" Feb 03 07:28:57 crc kubenswrapper[4708]: I0203 07:28:57.132735 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e8626bc3-c20f-47d2-b183-9d27e9ec814c","Type":"ContainerStarted","Data":"c9ac32151b3d0be2b504ea5cac5e25e3eea0879b18c65b86799a7a3dd18d75af"} Feb 03 07:28:57 crc kubenswrapper[4708]: I0203 07:28:57.133004 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Feb 03 07:28:57 crc kubenswrapper[4708]: I0203 07:28:57.182115 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=5.182094554 podStartE2EDuration="5.182094554s" podCreationTimestamp="2026-02-03 07:28:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:28:57.175148252 +0000 UTC m=+1116.157095059" watchObservedRunningTime="2026-02-03 07:28:57.182094554 +0000 UTC m=+1116.164041361" Feb 03 07:28:57 crc kubenswrapper[4708]: I0203 07:28:57.408146 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:28:57 crc kubenswrapper[4708]: I0203 07:28:57.408778 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3aa4c5d7-28f2-4fa2-9430-4865754b335e" containerName="ceilometer-central-agent" containerID="cri-o://49f614fc3555c349bf26cd79ba6a6429ff608907148876b7afc98942341e68fc" gracePeriod=30 Feb 03 07:28:57 crc kubenswrapper[4708]: I0203 07:28:57.408937 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3aa4c5d7-28f2-4fa2-9430-4865754b335e" containerName="proxy-httpd" containerID="cri-o://2cf24d631a6d0e8fd15fc3665ff572f69a128fdbd5933872e3f6d8a68b6a8745" gracePeriod=30 Feb 03 07:28:57 crc kubenswrapper[4708]: I0203 07:28:57.408991 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3aa4c5d7-28f2-4fa2-9430-4865754b335e" containerName="sg-core" 
containerID="cri-o://b45de0da0fc624bc80579b5ec95c09dc58036ec34f442d9ab4fec7468a543f23" gracePeriod=30 Feb 03 07:28:57 crc kubenswrapper[4708]: I0203 07:28:57.409036 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3aa4c5d7-28f2-4fa2-9430-4865754b335e" containerName="ceilometer-notification-agent" containerID="cri-o://03b075fb16dc6bb72666761a9bf466dc0cf6610a5e3ad65335540ced0501e8f9" gracePeriod=30 Feb 03 07:28:57 crc kubenswrapper[4708]: I0203 07:28:57.445280 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="3aa4c5d7-28f2-4fa2-9430-4865754b335e" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 502" Feb 03 07:28:57 crc kubenswrapper[4708]: I0203 07:28:57.461059 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-85b58fb76c-jldbq"] Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.149045 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-fb89f7cf6-7s2lp" Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.156763 4708 generic.go:334] "Generic (PLEG): container finished" podID="3aa4c5d7-28f2-4fa2-9430-4865754b335e" containerID="2cf24d631a6d0e8fd15fc3665ff572f69a128fdbd5933872e3f6d8a68b6a8745" exitCode=0 Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.156889 4708 generic.go:334] "Generic (PLEG): container finished" podID="3aa4c5d7-28f2-4fa2-9430-4865754b335e" containerID="b45de0da0fc624bc80579b5ec95c09dc58036ec34f442d9ab4fec7468a543f23" exitCode=2 Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.156896 4708 generic.go:334] "Generic (PLEG): container finished" podID="3aa4c5d7-28f2-4fa2-9430-4865754b335e" containerID="49f614fc3555c349bf26cd79ba6a6429ff608907148876b7afc98942341e68fc" exitCode=0 Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.156868 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3aa4c5d7-28f2-4fa2-9430-4865754b335e","Type":"ContainerDied","Data":"2cf24d631a6d0e8fd15fc3665ff572f69a128fdbd5933872e3f6d8a68b6a8745"} Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.157015 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3aa4c5d7-28f2-4fa2-9430-4865754b335e","Type":"ContainerDied","Data":"b45de0da0fc624bc80579b5ec95c09dc58036ec34f442d9ab4fec7468a543f23"} Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.157027 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3aa4c5d7-28f2-4fa2-9430-4865754b335e","Type":"ContainerDied","Data":"49f614fc3555c349bf26cd79ba6a6429ff608907148876b7afc98942341e68fc"} Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.161703 4708 scope.go:117] "RemoveContainer" containerID="413092028f6bbc984f610a50fb38670b9647f6df58504ac8acb890d90c2a1f1a" Feb 03 07:28:58 crc kubenswrapper[4708]: E0203 07:28:58.161911 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ironic-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ironic-api pod=ironic-fc5cbdf64-524m4_openstack(5d2f1707-b960-46ce-b412-6a16f8cc63c9)\"" pod="openstack/ironic-fc5cbdf64-524m4" podUID="5d2f1707-b960-46ce-b412-6a16f8cc63c9" Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.165562 4708 generic.go:334] "Generic (PLEG): container finished" podID="ef60526c-751e-464d-a1c1-a50e343093b7" 
containerID="b7fd16193d5b8bbe074e4c28feac1cdf29d65aa2feadce3b8badb1eb8df6afa4" exitCode=0 Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.165622 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-fb89f7cf6-7s2lp" Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.165853 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-fb89f7cf6-7s2lp" event={"ID":"ef60526c-751e-464d-a1c1-a50e343093b7","Type":"ContainerDied","Data":"b7fd16193d5b8bbe074e4c28feac1cdf29d65aa2feadce3b8badb1eb8df6afa4"} Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.165884 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-fb89f7cf6-7s2lp" event={"ID":"ef60526c-751e-464d-a1c1-a50e343093b7","Type":"ContainerDied","Data":"9f23619057a460ed30640c807f9c9ea0e3e77a9a0bb04774c02f9c7ffb2683c0"} Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.165902 4708 scope.go:117] "RemoveContainer" containerID="ce804deb4f437e893bdf37dfd4abb5c3b9ed636431ed5eb6463046f333a08aca" Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.177141 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-85b58fb76c-jldbq" event={"ID":"7e6d6f02-2176-4c8f-93c4-cb78832fc2d3","Type":"ContainerStarted","Data":"50b7c673e8797190ce50233a386e42fe7f0c7281cf5c85f264edfb6af80654b0"} Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.177379 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-85b58fb76c-jldbq" event={"ID":"7e6d6f02-2176-4c8f-93c4-cb78832fc2d3","Type":"ContainerStarted","Data":"620f4b91c3da55ef953229b68355f82dc58e800375b7bfca52d716b81f5dc39c"} Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.177391 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-85b58fb76c-jldbq" event={"ID":"7e6d6f02-2176-4c8f-93c4-cb78832fc2d3","Type":"ContainerStarted","Data":"c384b243ac6831faf5e3d8b0d9a8dd5dabe6eec19724fca064a155d1b6574058"} Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.178774 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-85b58fb76c-jldbq" Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.236456 4708 scope.go:117] "RemoveContainer" containerID="b7fd16193d5b8bbe074e4c28feac1cdf29d65aa2feadce3b8badb1eb8df6afa4" Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.242699 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-85b58fb76c-jldbq" podStartSLOduration=2.242679088 podStartE2EDuration="2.242679088s" podCreationTimestamp="2026-02-03 07:28:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:28:58.228101927 +0000 UTC m=+1117.210048734" watchObservedRunningTime="2026-02-03 07:28:58.242679088 +0000 UTC m=+1117.224625895" Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.243650 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ef60526c-751e-464d-a1c1-a50e343093b7-httpd-config\") pod \"ef60526c-751e-464d-a1c1-a50e343093b7\" (UID: \"ef60526c-751e-464d-a1c1-a50e343093b7\") " Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.243691 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5rxl4\" (UniqueName: 
\"kubernetes.io/projected/ef60526c-751e-464d-a1c1-a50e343093b7-kube-api-access-5rxl4\") pod \"ef60526c-751e-464d-a1c1-a50e343093b7\" (UID: \"ef60526c-751e-464d-a1c1-a50e343093b7\") " Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.243731 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef60526c-751e-464d-a1c1-a50e343093b7-ovndb-tls-certs\") pod \"ef60526c-751e-464d-a1c1-a50e343093b7\" (UID: \"ef60526c-751e-464d-a1c1-a50e343093b7\") " Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.243812 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef60526c-751e-464d-a1c1-a50e343093b7-combined-ca-bundle\") pod \"ef60526c-751e-464d-a1c1-a50e343093b7\" (UID: \"ef60526c-751e-464d-a1c1-a50e343093b7\") " Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.243880 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ef60526c-751e-464d-a1c1-a50e343093b7-config\") pod \"ef60526c-751e-464d-a1c1-a50e343093b7\" (UID: \"ef60526c-751e-464d-a1c1-a50e343093b7\") " Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.263709 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef60526c-751e-464d-a1c1-a50e343093b7-kube-api-access-5rxl4" (OuterVolumeSpecName: "kube-api-access-5rxl4") pod "ef60526c-751e-464d-a1c1-a50e343093b7" (UID: "ef60526c-751e-464d-a1c1-a50e343093b7"). InnerVolumeSpecName "kube-api-access-5rxl4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.266960 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef60526c-751e-464d-a1c1-a50e343093b7-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "ef60526c-751e-464d-a1c1-a50e343093b7" (UID: "ef60526c-751e-464d-a1c1-a50e343093b7"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.293561 4708 scope.go:117] "RemoveContainer" containerID="ce804deb4f437e893bdf37dfd4abb5c3b9ed636431ed5eb6463046f333a08aca" Feb 03 07:28:58 crc kubenswrapper[4708]: E0203 07:28:58.294100 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce804deb4f437e893bdf37dfd4abb5c3b9ed636431ed5eb6463046f333a08aca\": container with ID starting with ce804deb4f437e893bdf37dfd4abb5c3b9ed636431ed5eb6463046f333a08aca not found: ID does not exist" containerID="ce804deb4f437e893bdf37dfd4abb5c3b9ed636431ed5eb6463046f333a08aca" Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.294125 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce804deb4f437e893bdf37dfd4abb5c3b9ed636431ed5eb6463046f333a08aca"} err="failed to get container status \"ce804deb4f437e893bdf37dfd4abb5c3b9ed636431ed5eb6463046f333a08aca\": rpc error: code = NotFound desc = could not find container \"ce804deb4f437e893bdf37dfd4abb5c3b9ed636431ed5eb6463046f333a08aca\": container with ID starting with ce804deb4f437e893bdf37dfd4abb5c3b9ed636431ed5eb6463046f333a08aca not found: ID does not exist" Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.294145 4708 scope.go:117] "RemoveContainer" containerID="b7fd16193d5b8bbe074e4c28feac1cdf29d65aa2feadce3b8badb1eb8df6afa4" Feb 03 07:28:58 crc kubenswrapper[4708]: E0203 07:28:58.294369 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7fd16193d5b8bbe074e4c28feac1cdf29d65aa2feadce3b8badb1eb8df6afa4\": container with ID starting with b7fd16193d5b8bbe074e4c28feac1cdf29d65aa2feadce3b8badb1eb8df6afa4 not found: ID does not exist" containerID="b7fd16193d5b8bbe074e4c28feac1cdf29d65aa2feadce3b8badb1eb8df6afa4" Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.294386 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7fd16193d5b8bbe074e4c28feac1cdf29d65aa2feadce3b8badb1eb8df6afa4"} err="failed to get container status \"b7fd16193d5b8bbe074e4c28feac1cdf29d65aa2feadce3b8badb1eb8df6afa4\": rpc error: code = NotFound desc = could not find container \"b7fd16193d5b8bbe074e4c28feac1cdf29d65aa2feadce3b8badb1eb8df6afa4\": container with ID starting with b7fd16193d5b8bbe074e4c28feac1cdf29d65aa2feadce3b8badb1eb8df6afa4 not found: ID does not exist" Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.308609 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef60526c-751e-464d-a1c1-a50e343093b7-config" (OuterVolumeSpecName: "config") pod "ef60526c-751e-464d-a1c1-a50e343093b7" (UID: "ef60526c-751e-464d-a1c1-a50e343093b7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.311838 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef60526c-751e-464d-a1c1-a50e343093b7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ef60526c-751e-464d-a1c1-a50e343093b7" (UID: "ef60526c-751e-464d-a1c1-a50e343093b7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.326914 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef60526c-751e-464d-a1c1-a50e343093b7-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "ef60526c-751e-464d-a1c1-a50e343093b7" (UID: "ef60526c-751e-464d-a1c1-a50e343093b7"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.347111 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5rxl4\" (UniqueName: \"kubernetes.io/projected/ef60526c-751e-464d-a1c1-a50e343093b7-kube-api-access-5rxl4\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.347143 4708 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ef60526c-751e-464d-a1c1-a50e343093b7-httpd-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.347152 4708 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef60526c-751e-464d-a1c1-a50e343093b7-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.347162 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef60526c-751e-464d-a1c1-a50e343093b7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.347171 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/ef60526c-751e-464d-a1c1-a50e343093b7-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.544314 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-fb89f7cf6-7s2lp"] Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.552696 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-fb89f7cf6-7s2lp"] Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.833064 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.963583 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/87d3a294-c4b6-4ddf-9f60-c6afede1752a-etc-machine-id\") pod \"87d3a294-c4b6-4ddf-9f60-c6afede1752a\" (UID: \"87d3a294-c4b6-4ddf-9f60-c6afede1752a\") " Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.963655 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-svrdd\" (UniqueName: \"kubernetes.io/projected/87d3a294-c4b6-4ddf-9f60-c6afede1752a-kube-api-access-svrdd\") pod \"87d3a294-c4b6-4ddf-9f60-c6afede1752a\" (UID: \"87d3a294-c4b6-4ddf-9f60-c6afede1752a\") " Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.963679 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87d3a294-c4b6-4ddf-9f60-c6afede1752a-combined-ca-bundle\") pod \"87d3a294-c4b6-4ddf-9f60-c6afede1752a\" (UID: \"87d3a294-c4b6-4ddf-9f60-c6afede1752a\") " Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.963709 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/87d3a294-c4b6-4ddf-9f60-c6afede1752a-scripts\") pod \"87d3a294-c4b6-4ddf-9f60-c6afede1752a\" (UID: \"87d3a294-c4b6-4ddf-9f60-c6afede1752a\") " Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.963739 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/87d3a294-c4b6-4ddf-9f60-c6afede1752a-config-data-custom\") pod \"87d3a294-c4b6-4ddf-9f60-c6afede1752a\" (UID: \"87d3a294-c4b6-4ddf-9f60-c6afede1752a\") " Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.963805 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/87d3a294-c4b6-4ddf-9f60-c6afede1752a-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "87d3a294-c4b6-4ddf-9f60-c6afede1752a" (UID: "87d3a294-c4b6-4ddf-9f60-c6afede1752a"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.964540 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87d3a294-c4b6-4ddf-9f60-c6afede1752a-config-data\") pod \"87d3a294-c4b6-4ddf-9f60-c6afede1752a\" (UID: \"87d3a294-c4b6-4ddf-9f60-c6afede1752a\") " Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.964981 4708 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/87d3a294-c4b6-4ddf-9f60-c6afede1752a-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.968745 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87d3a294-c4b6-4ddf-9f60-c6afede1752a-kube-api-access-svrdd" (OuterVolumeSpecName: "kube-api-access-svrdd") pod "87d3a294-c4b6-4ddf-9f60-c6afede1752a" (UID: "87d3a294-c4b6-4ddf-9f60-c6afede1752a"). InnerVolumeSpecName "kube-api-access-svrdd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.970056 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87d3a294-c4b6-4ddf-9f60-c6afede1752a-scripts" (OuterVolumeSpecName: "scripts") pod "87d3a294-c4b6-4ddf-9f60-c6afede1752a" (UID: "87d3a294-c4b6-4ddf-9f60-c6afede1752a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:58 crc kubenswrapper[4708]: I0203 07:28:58.979137 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87d3a294-c4b6-4ddf-9f60-c6afede1752a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "87d3a294-c4b6-4ddf-9f60-c6afede1752a" (UID: "87d3a294-c4b6-4ddf-9f60-c6afede1752a"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.031429 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87d3a294-c4b6-4ddf-9f60-c6afede1752a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "87d3a294-c4b6-4ddf-9f60-c6afede1752a" (UID: "87d3a294-c4b6-4ddf-9f60-c6afede1752a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.067578 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-svrdd\" (UniqueName: \"kubernetes.io/projected/87d3a294-c4b6-4ddf-9f60-c6afede1752a-kube-api-access-svrdd\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.067893 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87d3a294-c4b6-4ddf-9f60-c6afede1752a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.067902 4708 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/87d3a294-c4b6-4ddf-9f60-c6afede1752a-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.067911 4708 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/87d3a294-c4b6-4ddf-9f60-c6afede1752a-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.162190 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87d3a294-c4b6-4ddf-9f60-c6afede1752a-config-data" (OuterVolumeSpecName: "config-data") pod "87d3a294-c4b6-4ddf-9f60-c6afede1752a" (UID: "87d3a294-c4b6-4ddf-9f60-c6afede1752a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.173047 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87d3a294-c4b6-4ddf-9f60-c6afede1752a-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.203039 4708 generic.go:334] "Generic (PLEG): container finished" podID="aeb72dfd-3f7b-41fa-882f-3290c463fcbe" containerID="8398b5ba17e634616cc84e36f75f9c35514c7e026577d62b4c96af19462ff71a" exitCode=1 Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.203120 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-neutron-agent-95b7948fb-x2nkv" event={"ID":"aeb72dfd-3f7b-41fa-882f-3290c463fcbe","Type":"ContainerDied","Data":"8398b5ba17e634616cc84e36f75f9c35514c7e026577d62b4c96af19462ff71a"} Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.203157 4708 scope.go:117] "RemoveContainer" containerID="744e15cc631e7a66e10f42708d11d559e626c5fee30e0669f563bde5e0051254" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.203822 4708 scope.go:117] "RemoveContainer" containerID="8398b5ba17e634616cc84e36f75f9c35514c7e026577d62b4c96af19462ff71a" Feb 03 07:28:59 crc kubenswrapper[4708]: E0203 07:28:59.204032 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ironic-neutron-agent\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ironic-neutron-agent pod=ironic-neutron-agent-95b7948fb-x2nkv_openstack(aeb72dfd-3f7b-41fa-882f-3290c463fcbe)\"" pod="openstack/ironic-neutron-agent-95b7948fb-x2nkv" podUID="aeb72dfd-3f7b-41fa-882f-3290c463fcbe" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.213080 4708 generic.go:334] "Generic (PLEG): container finished" podID="3aa4c5d7-28f2-4fa2-9430-4865754b335e" containerID="03b075fb16dc6bb72666761a9bf466dc0cf6610a5e3ad65335540ced0501e8f9" exitCode=0 Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.213181 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3aa4c5d7-28f2-4fa2-9430-4865754b335e","Type":"ContainerDied","Data":"03b075fb16dc6bb72666761a9bf466dc0cf6610a5e3ad65335540ced0501e8f9"} Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.227178 4708 generic.go:334] "Generic (PLEG): container finished" podID="87d3a294-c4b6-4ddf-9f60-c6afede1752a" containerID="817a73249ce67147d44e8e552c0fbbff5e8ad933ce2a428e698f7a8c56a8114e" exitCode=0 Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.229464 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.229879 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"87d3a294-c4b6-4ddf-9f60-c6afede1752a","Type":"ContainerDied","Data":"817a73249ce67147d44e8e552c0fbbff5e8ad933ce2a428e698f7a8c56a8114e"} Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.229943 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"87d3a294-c4b6-4ddf-9f60-c6afede1752a","Type":"ContainerDied","Data":"1f39bdfcf9124b095f03f5e2066d27f5cf8147e14a13269c2d7e13db3a4471c9"} Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.230070 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-85b58fb76c-jldbq" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.232155 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.306850 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.332548 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.357133 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Feb 03 07:28:59 crc kubenswrapper[4708]: E0203 07:28:59.357504 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3aa4c5d7-28f2-4fa2-9430-4865754b335e" containerName="ceilometer-notification-agent" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.357516 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="3aa4c5d7-28f2-4fa2-9430-4865754b335e" containerName="ceilometer-notification-agent" Feb 03 07:28:59 crc kubenswrapper[4708]: E0203 07:28:59.357526 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef60526c-751e-464d-a1c1-a50e343093b7" containerName="neutron-api" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.357532 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef60526c-751e-464d-a1c1-a50e343093b7" containerName="neutron-api" Feb 03 07:28:59 crc kubenswrapper[4708]: E0203 07:28:59.357548 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87d3a294-c4b6-4ddf-9f60-c6afede1752a" containerName="probe" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.357554 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="87d3a294-c4b6-4ddf-9f60-c6afede1752a" containerName="probe" Feb 03 07:28:59 crc kubenswrapper[4708]: E0203 07:28:59.357566 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef60526c-751e-464d-a1c1-a50e343093b7" containerName="neutron-httpd" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.357572 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef60526c-751e-464d-a1c1-a50e343093b7" containerName="neutron-httpd" Feb 03 07:28:59 crc kubenswrapper[4708]: E0203 07:28:59.357586 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3aa4c5d7-28f2-4fa2-9430-4865754b335e" containerName="proxy-httpd" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.357592 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="3aa4c5d7-28f2-4fa2-9430-4865754b335e" containerName="proxy-httpd" Feb 03 07:28:59 crc kubenswrapper[4708]: E0203 07:28:59.357603 4708 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="3aa4c5d7-28f2-4fa2-9430-4865754b335e" containerName="ceilometer-central-agent" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.357609 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="3aa4c5d7-28f2-4fa2-9430-4865754b335e" containerName="ceilometer-central-agent" Feb 03 07:28:59 crc kubenswrapper[4708]: E0203 07:28:59.357620 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87d3a294-c4b6-4ddf-9f60-c6afede1752a" containerName="cinder-scheduler" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.357626 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="87d3a294-c4b6-4ddf-9f60-c6afede1752a" containerName="cinder-scheduler" Feb 03 07:28:59 crc kubenswrapper[4708]: E0203 07:28:59.357640 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3aa4c5d7-28f2-4fa2-9430-4865754b335e" containerName="sg-core" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.357646 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="3aa4c5d7-28f2-4fa2-9430-4865754b335e" containerName="sg-core" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.357824 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef60526c-751e-464d-a1c1-a50e343093b7" containerName="neutron-httpd" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.357840 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="3aa4c5d7-28f2-4fa2-9430-4865754b335e" containerName="sg-core" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.357847 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="3aa4c5d7-28f2-4fa2-9430-4865754b335e" containerName="proxy-httpd" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.357857 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef60526c-751e-464d-a1c1-a50e343093b7" containerName="neutron-api" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.357872 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="87d3a294-c4b6-4ddf-9f60-c6afede1752a" containerName="probe" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.357881 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="3aa4c5d7-28f2-4fa2-9430-4865754b335e" containerName="ceilometer-notification-agent" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.357893 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="87d3a294-c4b6-4ddf-9f60-c6afede1752a" containerName="cinder-scheduler" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.357903 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="3aa4c5d7-28f2-4fa2-9430-4865754b335e" containerName="ceilometer-central-agent" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.358810 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.366041 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.375756 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3aa4c5d7-28f2-4fa2-9430-4865754b335e-config-data\") pod \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\" (UID: \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\") " Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.375818 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aa4c5d7-28f2-4fa2-9430-4865754b335e-combined-ca-bundle\") pod \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\" (UID: \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\") " Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.375938 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3aa4c5d7-28f2-4fa2-9430-4865754b335e-log-httpd\") pod \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\" (UID: \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\") " Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.375989 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3aa4c5d7-28f2-4fa2-9430-4865754b335e-scripts\") pod \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\" (UID: \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\") " Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.376006 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3aa4c5d7-28f2-4fa2-9430-4865754b335e-sg-core-conf-yaml\") pod \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\" (UID: \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\") " Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.376210 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5pckj\" (UniqueName: \"kubernetes.io/projected/3aa4c5d7-28f2-4fa2-9430-4865754b335e-kube-api-access-5pckj\") pod \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\" (UID: \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\") " Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.376237 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3aa4c5d7-28f2-4fa2-9430-4865754b335e-run-httpd\") pod \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\" (UID: \"3aa4c5d7-28f2-4fa2-9430-4865754b335e\") " Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.377047 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3aa4c5d7-28f2-4fa2-9430-4865754b335e-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "3aa4c5d7-28f2-4fa2-9430-4865754b335e" (UID: "3aa4c5d7-28f2-4fa2-9430-4865754b335e"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.377323 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3aa4c5d7-28f2-4fa2-9430-4865754b335e-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "3aa4c5d7-28f2-4fa2-9430-4865754b335e" (UID: "3aa4c5d7-28f2-4fa2-9430-4865754b335e"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.395345 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.399510 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3aa4c5d7-28f2-4fa2-9430-4865754b335e-scripts" (OuterVolumeSpecName: "scripts") pod "3aa4c5d7-28f2-4fa2-9430-4865754b335e" (UID: "3aa4c5d7-28f2-4fa2-9430-4865754b335e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.429577 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3aa4c5d7-28f2-4fa2-9430-4865754b335e-kube-api-access-5pckj" (OuterVolumeSpecName: "kube-api-access-5pckj") pod "3aa4c5d7-28f2-4fa2-9430-4865754b335e" (UID: "3aa4c5d7-28f2-4fa2-9430-4865754b335e"). InnerVolumeSpecName "kube-api-access-5pckj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.478992 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/55a84e3b-6f9a-44d0-b059-2a4c842810dc-scripts\") pod \"cinder-scheduler-0\" (UID: \"55a84e3b-6f9a-44d0-b059-2a4c842810dc\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.479052 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55a84e3b-6f9a-44d0-b059-2a4c842810dc-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"55a84e3b-6f9a-44d0-b059-2a4c842810dc\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.479077 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtvd8\" (UniqueName: \"kubernetes.io/projected/55a84e3b-6f9a-44d0-b059-2a4c842810dc-kube-api-access-wtvd8\") pod \"cinder-scheduler-0\" (UID: \"55a84e3b-6f9a-44d0-b059-2a4c842810dc\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.479116 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/55a84e3b-6f9a-44d0-b059-2a4c842810dc-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"55a84e3b-6f9a-44d0-b059-2a4c842810dc\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.479160 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/55a84e3b-6f9a-44d0-b059-2a4c842810dc-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"55a84e3b-6f9a-44d0-b059-2a4c842810dc\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.479209 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55a84e3b-6f9a-44d0-b059-2a4c842810dc-config-data\") pod \"cinder-scheduler-0\" (UID: \"55a84e3b-6f9a-44d0-b059-2a4c842810dc\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.479278 4708 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" 
(UniqueName: \"kubernetes.io/empty-dir/3aa4c5d7-28f2-4fa2-9430-4865754b335e-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.479294 4708 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3aa4c5d7-28f2-4fa2-9430-4865754b335e-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.479303 4708 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3aa4c5d7-28f2-4fa2-9430-4865754b335e-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.479311 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5pckj\" (UniqueName: \"kubernetes.io/projected/3aa4c5d7-28f2-4fa2-9430-4865754b335e-kube-api-access-5pckj\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.502975 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3aa4c5d7-28f2-4fa2-9430-4865754b335e-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "3aa4c5d7-28f2-4fa2-9430-4865754b335e" (UID: "3aa4c5d7-28f2-4fa2-9430-4865754b335e"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.580531 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55a84e3b-6f9a-44d0-b059-2a4c842810dc-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"55a84e3b-6f9a-44d0-b059-2a4c842810dc\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.580671 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtvd8\" (UniqueName: \"kubernetes.io/projected/55a84e3b-6f9a-44d0-b059-2a4c842810dc-kube-api-access-wtvd8\") pod \"cinder-scheduler-0\" (UID: \"55a84e3b-6f9a-44d0-b059-2a4c842810dc\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.580723 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/55a84e3b-6f9a-44d0-b059-2a4c842810dc-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"55a84e3b-6f9a-44d0-b059-2a4c842810dc\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.580816 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/55a84e3b-6f9a-44d0-b059-2a4c842810dc-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"55a84e3b-6f9a-44d0-b059-2a4c842810dc\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.580878 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/55a84e3b-6f9a-44d0-b059-2a4c842810dc-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"55a84e3b-6f9a-44d0-b059-2a4c842810dc\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.580947 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55a84e3b-6f9a-44d0-b059-2a4c842810dc-config-data\") pod \"cinder-scheduler-0\" (UID: \"55a84e3b-6f9a-44d0-b059-2a4c842810dc\") " 
pod="openstack/cinder-scheduler-0" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.581002 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/55a84e3b-6f9a-44d0-b059-2a4c842810dc-scripts\") pod \"cinder-scheduler-0\" (UID: \"55a84e3b-6f9a-44d0-b059-2a4c842810dc\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.582145 4708 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3aa4c5d7-28f2-4fa2-9430-4865754b335e-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.594342 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/55a84e3b-6f9a-44d0-b059-2a4c842810dc-scripts\") pod \"cinder-scheduler-0\" (UID: \"55a84e3b-6f9a-44d0-b059-2a4c842810dc\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.599286 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/55a84e3b-6f9a-44d0-b059-2a4c842810dc-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"55a84e3b-6f9a-44d0-b059-2a4c842810dc\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.611440 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55a84e3b-6f9a-44d0-b059-2a4c842810dc-config-data\") pod \"cinder-scheduler-0\" (UID: \"55a84e3b-6f9a-44d0-b059-2a4c842810dc\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.614598 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55a84e3b-6f9a-44d0-b059-2a4c842810dc-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"55a84e3b-6f9a-44d0-b059-2a4c842810dc\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.622395 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtvd8\" (UniqueName: \"kubernetes.io/projected/55a84e3b-6f9a-44d0-b059-2a4c842810dc-kube-api-access-wtvd8\") pod \"cinder-scheduler-0\" (UID: \"55a84e3b-6f9a-44d0-b059-2a4c842810dc\") " pod="openstack/cinder-scheduler-0" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.639058 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3aa4c5d7-28f2-4fa2-9430-4865754b335e-config-data" (OuterVolumeSpecName: "config-data") pod "3aa4c5d7-28f2-4fa2-9430-4865754b335e" (UID: "3aa4c5d7-28f2-4fa2-9430-4865754b335e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.664726 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3aa4c5d7-28f2-4fa2-9430-4865754b335e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3aa4c5d7-28f2-4fa2-9430-4865754b335e" (UID: "3aa4c5d7-28f2-4fa2-9430-4865754b335e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.667045 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.683712 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3aa4c5d7-28f2-4fa2-9430-4865754b335e-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.683740 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aa4c5d7-28f2-4fa2-9430-4865754b335e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.715023 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ironic-inspector-db-sync-v45nm"] Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.716405 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-inspector-db-sync-v45nm" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.719256 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-inspector-scripts" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.719469 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-inspector-config-data" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.725303 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-inspector-db-sync-v45nm"] Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.749421 4708 scope.go:117] "RemoveContainer" containerID="d8ae524c874cf48067c46d7f7808c30018fc04bf0e7b75fe04d1d8eaae3f43a4" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.886497 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/74d486e0-cafe-4001-a817-dea3959bb928-etc-podinfo\") pod \"ironic-inspector-db-sync-v45nm\" (UID: \"74d486e0-cafe-4001-a817-dea3959bb928\") " pod="openstack/ironic-inspector-db-sync-v45nm" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.886548 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/74d486e0-cafe-4001-a817-dea3959bb928-config\") pod \"ironic-inspector-db-sync-v45nm\" (UID: \"74d486e0-cafe-4001-a817-dea3959bb928\") " pod="openstack/ironic-inspector-db-sync-v45nm" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.886603 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-ironic\" (UniqueName: \"kubernetes.io/empty-dir/74d486e0-cafe-4001-a817-dea3959bb928-var-lib-ironic\") pod \"ironic-inspector-db-sync-v45nm\" (UID: \"74d486e0-cafe-4001-a817-dea3959bb928\") " pod="openstack/ironic-inspector-db-sync-v45nm" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.886632 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74d486e0-cafe-4001-a817-dea3959bb928-combined-ca-bundle\") pod \"ironic-inspector-db-sync-v45nm\" (UID: \"74d486e0-cafe-4001-a817-dea3959bb928\") " pod="openstack/ironic-inspector-db-sync-v45nm" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.886745 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vrp5\" (UniqueName: \"kubernetes.io/projected/74d486e0-cafe-4001-a817-dea3959bb928-kube-api-access-2vrp5\") pod 
\"ironic-inspector-db-sync-v45nm\" (UID: \"74d486e0-cafe-4001-a817-dea3959bb928\") " pod="openstack/ironic-inspector-db-sync-v45nm" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.886909 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-ironic-inspector-dhcp-hostsdir\" (UniqueName: \"kubernetes.io/empty-dir/74d486e0-cafe-4001-a817-dea3959bb928-var-lib-ironic-inspector-dhcp-hostsdir\") pod \"ironic-inspector-db-sync-v45nm\" (UID: \"74d486e0-cafe-4001-a817-dea3959bb928\") " pod="openstack/ironic-inspector-db-sync-v45nm" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.886980 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74d486e0-cafe-4001-a817-dea3959bb928-scripts\") pod \"ironic-inspector-db-sync-v45nm\" (UID: \"74d486e0-cafe-4001-a817-dea3959bb928\") " pod="openstack/ironic-inspector-db-sync-v45nm" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.925527 4708 scope.go:117] "RemoveContainer" containerID="817a73249ce67147d44e8e552c0fbbff5e8ad933ce2a428e698f7a8c56a8114e" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.930365 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ironic-5fffdc6c76-m5s5d" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.973957 4708 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/ironic-neutron-agent-95b7948fb-x2nkv" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.993182 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74d486e0-cafe-4001-a817-dea3959bb928-scripts\") pod \"ironic-inspector-db-sync-v45nm\" (UID: \"74d486e0-cafe-4001-a817-dea3959bb928\") " pod="openstack/ironic-inspector-db-sync-v45nm" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.993233 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/74d486e0-cafe-4001-a817-dea3959bb928-etc-podinfo\") pod \"ironic-inspector-db-sync-v45nm\" (UID: \"74d486e0-cafe-4001-a817-dea3959bb928\") " pod="openstack/ironic-inspector-db-sync-v45nm" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.993254 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/74d486e0-cafe-4001-a817-dea3959bb928-config\") pod \"ironic-inspector-db-sync-v45nm\" (UID: \"74d486e0-cafe-4001-a817-dea3959bb928\") " pod="openstack/ironic-inspector-db-sync-v45nm" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.993272 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-ironic\" (UniqueName: \"kubernetes.io/empty-dir/74d486e0-cafe-4001-a817-dea3959bb928-var-lib-ironic\") pod \"ironic-inspector-db-sync-v45nm\" (UID: \"74d486e0-cafe-4001-a817-dea3959bb928\") " pod="openstack/ironic-inspector-db-sync-v45nm" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.993291 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74d486e0-cafe-4001-a817-dea3959bb928-combined-ca-bundle\") pod \"ironic-inspector-db-sync-v45nm\" (UID: \"74d486e0-cafe-4001-a817-dea3959bb928\") " pod="openstack/ironic-inspector-db-sync-v45nm" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.993329 4708 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vrp5\" (UniqueName: \"kubernetes.io/projected/74d486e0-cafe-4001-a817-dea3959bb928-kube-api-access-2vrp5\") pod \"ironic-inspector-db-sync-v45nm\" (UID: \"74d486e0-cafe-4001-a817-dea3959bb928\") " pod="openstack/ironic-inspector-db-sync-v45nm" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.993413 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-ironic-inspector-dhcp-hostsdir\" (UniqueName: \"kubernetes.io/empty-dir/74d486e0-cafe-4001-a817-dea3959bb928-var-lib-ironic-inspector-dhcp-hostsdir\") pod \"ironic-inspector-db-sync-v45nm\" (UID: \"74d486e0-cafe-4001-a817-dea3959bb928\") " pod="openstack/ironic-inspector-db-sync-v45nm" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.993859 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-ironic-inspector-dhcp-hostsdir\" (UniqueName: \"kubernetes.io/empty-dir/74d486e0-cafe-4001-a817-dea3959bb928-var-lib-ironic-inspector-dhcp-hostsdir\") pod \"ironic-inspector-db-sync-v45nm\" (UID: \"74d486e0-cafe-4001-a817-dea3959bb928\") " pod="openstack/ironic-inspector-db-sync-v45nm" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.994263 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-ironic\" (UniqueName: \"kubernetes.io/empty-dir/74d486e0-cafe-4001-a817-dea3959bb928-var-lib-ironic\") pod \"ironic-inspector-db-sync-v45nm\" (UID: \"74d486e0-cafe-4001-a817-dea3959bb928\") " pod="openstack/ironic-inspector-db-sync-v45nm" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.998070 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74d486e0-cafe-4001-a817-dea3959bb928-scripts\") pod \"ironic-inspector-db-sync-v45nm\" (UID: \"74d486e0-cafe-4001-a817-dea3959bb928\") " pod="openstack/ironic-inspector-db-sync-v45nm" Feb 03 07:28:59 crc kubenswrapper[4708]: I0203 07:28:59.998192 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/74d486e0-cafe-4001-a817-dea3959bb928-config\") pod \"ironic-inspector-db-sync-v45nm\" (UID: \"74d486e0-cafe-4001-a817-dea3959bb928\") " pod="openstack/ironic-inspector-db-sync-v45nm" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.001205 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74d486e0-cafe-4001-a817-dea3959bb928-combined-ca-bundle\") pod \"ironic-inspector-db-sync-v45nm\" (UID: \"74d486e0-cafe-4001-a817-dea3959bb928\") " pod="openstack/ironic-inspector-db-sync-v45nm" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.009357 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/74d486e0-cafe-4001-a817-dea3959bb928-etc-podinfo\") pod \"ironic-inspector-db-sync-v45nm\" (UID: \"74d486e0-cafe-4001-a817-dea3959bb928\") " pod="openstack/ironic-inspector-db-sync-v45nm" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.013206 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ironic-fc5cbdf64-524m4"] Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.013544 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ironic-fc5cbdf64-524m4" podUID="5d2f1707-b960-46ce-b412-6a16f8cc63c9" containerName="ironic-api-log" 
containerID="cri-o://30454f427b56f4c4f9380d560f1135202646353aa4998b0560eb942399e78727" gracePeriod=60 Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.019216 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vrp5\" (UniqueName: \"kubernetes.io/projected/74d486e0-cafe-4001-a817-dea3959bb928-kube-api-access-2vrp5\") pod \"ironic-inspector-db-sync-v45nm\" (UID: \"74d486e0-cafe-4001-a817-dea3959bb928\") " pod="openstack/ironic-inspector-db-sync-v45nm" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.048056 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-inspector-db-sync-v45nm" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.112037 4708 scope.go:117] "RemoveContainer" containerID="d8ae524c874cf48067c46d7f7808c30018fc04bf0e7b75fe04d1d8eaae3f43a4" Feb 03 07:29:00 crc kubenswrapper[4708]: E0203 07:29:00.112431 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8ae524c874cf48067c46d7f7808c30018fc04bf0e7b75fe04d1d8eaae3f43a4\": container with ID starting with d8ae524c874cf48067c46d7f7808c30018fc04bf0e7b75fe04d1d8eaae3f43a4 not found: ID does not exist" containerID="d8ae524c874cf48067c46d7f7808c30018fc04bf0e7b75fe04d1d8eaae3f43a4" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.112474 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8ae524c874cf48067c46d7f7808c30018fc04bf0e7b75fe04d1d8eaae3f43a4"} err="failed to get container status \"d8ae524c874cf48067c46d7f7808c30018fc04bf0e7b75fe04d1d8eaae3f43a4\": rpc error: code = NotFound desc = could not find container \"d8ae524c874cf48067c46d7f7808c30018fc04bf0e7b75fe04d1d8eaae3f43a4\": container with ID starting with d8ae524c874cf48067c46d7f7808c30018fc04bf0e7b75fe04d1d8eaae3f43a4 not found: ID does not exist" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.112515 4708 scope.go:117] "RemoveContainer" containerID="817a73249ce67147d44e8e552c0fbbff5e8ad933ce2a428e698f7a8c56a8114e" Feb 03 07:29:00 crc kubenswrapper[4708]: E0203 07:29:00.112955 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"817a73249ce67147d44e8e552c0fbbff5e8ad933ce2a428e698f7a8c56a8114e\": container with ID starting with 817a73249ce67147d44e8e552c0fbbff5e8ad933ce2a428e698f7a8c56a8114e not found: ID does not exist" containerID="817a73249ce67147d44e8e552c0fbbff5e8ad933ce2a428e698f7a8c56a8114e" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.112993 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"817a73249ce67147d44e8e552c0fbbff5e8ad933ce2a428e698f7a8c56a8114e"} err="failed to get container status \"817a73249ce67147d44e8e552c0fbbff5e8ad933ce2a428e698f7a8c56a8114e\": rpc error: code = NotFound desc = could not find container \"817a73249ce67147d44e8e552c0fbbff5e8ad933ce2a428e698f7a8c56a8114e\": container with ID starting with 817a73249ce67147d44e8e552c0fbbff5e8ad933ce2a428e698f7a8c56a8114e not found: ID does not exist" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.174230 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87d3a294-c4b6-4ddf-9f60-c6afede1752a" path="/var/lib/kubelet/pods/87d3a294-c4b6-4ddf-9f60-c6afede1752a/volumes" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.175354 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="ef60526c-751e-464d-a1c1-a50e343093b7" path="/var/lib/kubelet/pods/ef60526c-751e-464d-a1c1-a50e343093b7/volumes" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.285699 4708 scope.go:117] "RemoveContainer" containerID="8398b5ba17e634616cc84e36f75f9c35514c7e026577d62b4c96af19462ff71a" Feb 03 07:29:00 crc kubenswrapper[4708]: E0203 07:29:00.286228 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ironic-neutron-agent\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ironic-neutron-agent pod=ironic-neutron-agent-95b7948fb-x2nkv_openstack(aeb72dfd-3f7b-41fa-882f-3290c463fcbe)\"" pod="openstack/ironic-neutron-agent-95b7948fb-x2nkv" podUID="aeb72dfd-3f7b-41fa-882f-3290c463fcbe" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.307153 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3aa4c5d7-28f2-4fa2-9430-4865754b335e","Type":"ContainerDied","Data":"e18e7c42adce1072464179115012f13cda533dd77b189538923906d964166717"} Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.307211 4708 scope.go:117] "RemoveContainer" containerID="2cf24d631a6d0e8fd15fc3665ff572f69a128fdbd5933872e3f6d8a68b6a8745" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.315485 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.438938 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.450092 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.467858 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.471189 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.474584 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.474851 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.497605 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.502597 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ironic-fc5cbdf64-524m4" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.586344 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:29:00 crc kubenswrapper[4708]: E0203 07:29:00.587342 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle config-data kube-api-access-mzvl5 log-httpd run-httpd scripts sg-core-conf-yaml], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/ceilometer-0" podUID="47ef73e2-564e-47a6-9796-f61d5542d740" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.600006 4708 scope.go:117] "RemoveContainer" containerID="b45de0da0fc624bc80579b5ec95c09dc58036ec34f442d9ab4fec7468a543f23" Feb 03 07:29:00 crc kubenswrapper[4708]: W0203 07:29:00.608613 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod55a84e3b_6f9a_44d0_b059_2a4c842810dc.slice/crio-4b8dd8aa4178c11dceef2b68e3bedd51d6d9f2e9714cd03d839c571c799ee30a WatchSource:0}: Error finding container 4b8dd8aa4178c11dceef2b68e3bedd51d6d9f2e9714cd03d839c571c799ee30a: Status 404 returned error can't find the container with id 4b8dd8aa4178c11dceef2b68e3bedd51d6d9f2e9714cd03d839c571c799ee30a Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.621097 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.625687 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47ef73e2-564e-47a6-9796-f61d5542d740-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"47ef73e2-564e-47a6-9796-f61d5542d740\") " pod="openstack/ceilometer-0" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.625814 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47ef73e2-564e-47a6-9796-f61d5542d740-scripts\") pod \"ceilometer-0\" (UID: \"47ef73e2-564e-47a6-9796-f61d5542d740\") " pod="openstack/ceilometer-0" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.625849 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47ef73e2-564e-47a6-9796-f61d5542d740-config-data\") pod \"ceilometer-0\" (UID: \"47ef73e2-564e-47a6-9796-f61d5542d740\") " pod="openstack/ceilometer-0" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.625897 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/47ef73e2-564e-47a6-9796-f61d5542d740-log-httpd\") pod \"ceilometer-0\" (UID: 
\"47ef73e2-564e-47a6-9796-f61d5542d740\") " pod="openstack/ceilometer-0" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.625985 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzvl5\" (UniqueName: \"kubernetes.io/projected/47ef73e2-564e-47a6-9796-f61d5542d740-kube-api-access-mzvl5\") pod \"ceilometer-0\" (UID: \"47ef73e2-564e-47a6-9796-f61d5542d740\") " pod="openstack/ceilometer-0" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.626024 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/47ef73e2-564e-47a6-9796-f61d5542d740-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"47ef73e2-564e-47a6-9796-f61d5542d740\") " pod="openstack/ceilometer-0" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.626050 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/47ef73e2-564e-47a6-9796-f61d5542d740-run-httpd\") pod \"ceilometer-0\" (UID: \"47ef73e2-564e-47a6-9796-f61d5542d740\") " pod="openstack/ceilometer-0" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.667544 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-inspector-db-sync-v45nm"] Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.728201 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47ef73e2-564e-47a6-9796-f61d5542d740-config-data\") pod \"ceilometer-0\" (UID: \"47ef73e2-564e-47a6-9796-f61d5542d740\") " pod="openstack/ceilometer-0" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.728598 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/47ef73e2-564e-47a6-9796-f61d5542d740-log-httpd\") pod \"ceilometer-0\" (UID: \"47ef73e2-564e-47a6-9796-f61d5542d740\") " pod="openstack/ceilometer-0" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.728656 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzvl5\" (UniqueName: \"kubernetes.io/projected/47ef73e2-564e-47a6-9796-f61d5542d740-kube-api-access-mzvl5\") pod \"ceilometer-0\" (UID: \"47ef73e2-564e-47a6-9796-f61d5542d740\") " pod="openstack/ceilometer-0" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.728683 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/47ef73e2-564e-47a6-9796-f61d5542d740-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"47ef73e2-564e-47a6-9796-f61d5542d740\") " pod="openstack/ceilometer-0" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.728703 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/47ef73e2-564e-47a6-9796-f61d5542d740-run-httpd\") pod \"ceilometer-0\" (UID: \"47ef73e2-564e-47a6-9796-f61d5542d740\") " pod="openstack/ceilometer-0" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.728750 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47ef73e2-564e-47a6-9796-f61d5542d740-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"47ef73e2-564e-47a6-9796-f61d5542d740\") " pod="openstack/ceilometer-0" Feb 03 07:29:00 crc kubenswrapper[4708]: 
I0203 07:29:00.728839 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47ef73e2-564e-47a6-9796-f61d5542d740-scripts\") pod \"ceilometer-0\" (UID: \"47ef73e2-564e-47a6-9796-f61d5542d740\") " pod="openstack/ceilometer-0" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.730010 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/47ef73e2-564e-47a6-9796-f61d5542d740-log-httpd\") pod \"ceilometer-0\" (UID: \"47ef73e2-564e-47a6-9796-f61d5542d740\") " pod="openstack/ceilometer-0" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.730189 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/47ef73e2-564e-47a6-9796-f61d5542d740-run-httpd\") pod \"ceilometer-0\" (UID: \"47ef73e2-564e-47a6-9796-f61d5542d740\") " pod="openstack/ceilometer-0" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.732934 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47ef73e2-564e-47a6-9796-f61d5542d740-scripts\") pod \"ceilometer-0\" (UID: \"47ef73e2-564e-47a6-9796-f61d5542d740\") " pod="openstack/ceilometer-0" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.733462 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47ef73e2-564e-47a6-9796-f61d5542d740-config-data\") pod \"ceilometer-0\" (UID: \"47ef73e2-564e-47a6-9796-f61d5542d740\") " pod="openstack/ceilometer-0" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.734187 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/47ef73e2-564e-47a6-9796-f61d5542d740-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"47ef73e2-564e-47a6-9796-f61d5542d740\") " pod="openstack/ceilometer-0" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.734231 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47ef73e2-564e-47a6-9796-f61d5542d740-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"47ef73e2-564e-47a6-9796-f61d5542d740\") " pod="openstack/ceilometer-0" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.747897 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzvl5\" (UniqueName: \"kubernetes.io/projected/47ef73e2-564e-47a6-9796-f61d5542d740-kube-api-access-mzvl5\") pod \"ceilometer-0\" (UID: \"47ef73e2-564e-47a6-9796-f61d5542d740\") " pod="openstack/ceilometer-0" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.801749 4708 scope.go:117] "RemoveContainer" containerID="03b075fb16dc6bb72666761a9bf466dc0cf6610a5e3ad65335540ced0501e8f9" Feb 03 07:29:00 crc kubenswrapper[4708]: I0203 07:29:00.840075 4708 scope.go:117] "RemoveContainer" containerID="49f614fc3555c349bf26cd79ba6a6429ff608907148876b7afc98942341e68fc" Feb 03 07:29:01 crc kubenswrapper[4708]: I0203 07:29:01.329833 4708 generic.go:334] "Generic (PLEG): container finished" podID="5d2f1707-b960-46ce-b412-6a16f8cc63c9" containerID="30454f427b56f4c4f9380d560f1135202646353aa4998b0560eb942399e78727" exitCode=143 Feb 03 07:29:01 crc kubenswrapper[4708]: I0203 07:29:01.329884 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-fc5cbdf64-524m4" 
event={"ID":"5d2f1707-b960-46ce-b412-6a16f8cc63c9","Type":"ContainerDied","Data":"30454f427b56f4c4f9380d560f1135202646353aa4998b0560eb942399e78727"} Feb 03 07:29:01 crc kubenswrapper[4708]: I0203 07:29:01.331714 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"55a84e3b-6f9a-44d0-b059-2a4c842810dc","Type":"ContainerStarted","Data":"4b8dd8aa4178c11dceef2b68e3bedd51d6d9f2e9714cd03d839c571c799ee30a"} Feb 03 07:29:01 crc kubenswrapper[4708]: I0203 07:29:01.333611 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-db-sync-v45nm" event={"ID":"74d486e0-cafe-4001-a817-dea3959bb928","Type":"ContainerStarted","Data":"cdb0cfb4f06b4c52a38a9d5f8e360b251c39fad2c154fdc584451625bf26afc9"} Feb 03 07:29:01 crc kubenswrapper[4708]: I0203 07:29:01.333659 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:29:01 crc kubenswrapper[4708]: I0203 07:29:01.374690 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:29:01 crc kubenswrapper[4708]: I0203 07:29:01.545251 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/47ef73e2-564e-47a6-9796-f61d5542d740-run-httpd\") pod \"47ef73e2-564e-47a6-9796-f61d5542d740\" (UID: \"47ef73e2-564e-47a6-9796-f61d5542d740\") " Feb 03 07:29:01 crc kubenswrapper[4708]: I0203 07:29:01.545333 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47ef73e2-564e-47a6-9796-f61d5542d740-scripts\") pod \"47ef73e2-564e-47a6-9796-f61d5542d740\" (UID: \"47ef73e2-564e-47a6-9796-f61d5542d740\") " Feb 03 07:29:01 crc kubenswrapper[4708]: I0203 07:29:01.545393 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/47ef73e2-564e-47a6-9796-f61d5542d740-sg-core-conf-yaml\") pod \"47ef73e2-564e-47a6-9796-f61d5542d740\" (UID: \"47ef73e2-564e-47a6-9796-f61d5542d740\") " Feb 03 07:29:01 crc kubenswrapper[4708]: I0203 07:29:01.545444 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47ef73e2-564e-47a6-9796-f61d5542d740-config-data\") pod \"47ef73e2-564e-47a6-9796-f61d5542d740\" (UID: \"47ef73e2-564e-47a6-9796-f61d5542d740\") " Feb 03 07:29:01 crc kubenswrapper[4708]: I0203 07:29:01.545476 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/47ef73e2-564e-47a6-9796-f61d5542d740-log-httpd\") pod \"47ef73e2-564e-47a6-9796-f61d5542d740\" (UID: \"47ef73e2-564e-47a6-9796-f61d5542d740\") " Feb 03 07:29:01 crc kubenswrapper[4708]: I0203 07:29:01.545625 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47ef73e2-564e-47a6-9796-f61d5542d740-combined-ca-bundle\") pod \"47ef73e2-564e-47a6-9796-f61d5542d740\" (UID: \"47ef73e2-564e-47a6-9796-f61d5542d740\") " Feb 03 07:29:01 crc kubenswrapper[4708]: I0203 07:29:01.545673 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mzvl5\" (UniqueName: \"kubernetes.io/projected/47ef73e2-564e-47a6-9796-f61d5542d740-kube-api-access-mzvl5\") pod \"47ef73e2-564e-47a6-9796-f61d5542d740\" (UID: 
\"47ef73e2-564e-47a6-9796-f61d5542d740\") " Feb 03 07:29:01 crc kubenswrapper[4708]: I0203 07:29:01.546243 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47ef73e2-564e-47a6-9796-f61d5542d740-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "47ef73e2-564e-47a6-9796-f61d5542d740" (UID: "47ef73e2-564e-47a6-9796-f61d5542d740"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:29:01 crc kubenswrapper[4708]: I0203 07:29:01.546264 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47ef73e2-564e-47a6-9796-f61d5542d740-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "47ef73e2-564e-47a6-9796-f61d5542d740" (UID: "47ef73e2-564e-47a6-9796-f61d5542d740"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:29:01 crc kubenswrapper[4708]: I0203 07:29:01.553115 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47ef73e2-564e-47a6-9796-f61d5542d740-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "47ef73e2-564e-47a6-9796-f61d5542d740" (UID: "47ef73e2-564e-47a6-9796-f61d5542d740"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:29:01 crc kubenswrapper[4708]: I0203 07:29:01.553828 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47ef73e2-564e-47a6-9796-f61d5542d740-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "47ef73e2-564e-47a6-9796-f61d5542d740" (UID: "47ef73e2-564e-47a6-9796-f61d5542d740"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:29:01 crc kubenswrapper[4708]: I0203 07:29:01.561897 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47ef73e2-564e-47a6-9796-f61d5542d740-config-data" (OuterVolumeSpecName: "config-data") pod "47ef73e2-564e-47a6-9796-f61d5542d740" (UID: "47ef73e2-564e-47a6-9796-f61d5542d740"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:29:01 crc kubenswrapper[4708]: I0203 07:29:01.566202 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47ef73e2-564e-47a6-9796-f61d5542d740-kube-api-access-mzvl5" (OuterVolumeSpecName: "kube-api-access-mzvl5") pod "47ef73e2-564e-47a6-9796-f61d5542d740" (UID: "47ef73e2-564e-47a6-9796-f61d5542d740"). InnerVolumeSpecName "kube-api-access-mzvl5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:29:01 crc kubenswrapper[4708]: I0203 07:29:01.574011 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47ef73e2-564e-47a6-9796-f61d5542d740-scripts" (OuterVolumeSpecName: "scripts") pod "47ef73e2-564e-47a6-9796-f61d5542d740" (UID: "47ef73e2-564e-47a6-9796-f61d5542d740"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:29:01 crc kubenswrapper[4708]: I0203 07:29:01.647614 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47ef73e2-564e-47a6-9796-f61d5542d740-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:01 crc kubenswrapper[4708]: I0203 07:29:01.647646 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mzvl5\" (UniqueName: \"kubernetes.io/projected/47ef73e2-564e-47a6-9796-f61d5542d740-kube-api-access-mzvl5\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:01 crc kubenswrapper[4708]: I0203 07:29:01.647658 4708 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/47ef73e2-564e-47a6-9796-f61d5542d740-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:01 crc kubenswrapper[4708]: I0203 07:29:01.647666 4708 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47ef73e2-564e-47a6-9796-f61d5542d740-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:01 crc kubenswrapper[4708]: I0203 07:29:01.647678 4708 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/47ef73e2-564e-47a6-9796-f61d5542d740-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:01 crc kubenswrapper[4708]: I0203 07:29:01.647687 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47ef73e2-564e-47a6-9796-f61d5542d740-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:01 crc kubenswrapper[4708]: I0203 07:29:01.647694 4708 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/47ef73e2-564e-47a6-9796-f61d5542d740-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.111494 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3aa4c5d7-28f2-4fa2-9430-4865754b335e" path="/var/lib/kubelet/pods/3aa4c5d7-28f2-4fa2-9430-4865754b335e/volumes" Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.344373 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.345359 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"55a84e3b-6f9a-44d0-b059-2a4c842810dc","Type":"ContainerStarted","Data":"08b429361ef05724461d1438173dab595dc8dd1bedfa26d389d47b72b985795e"} Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.436536 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.474337 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.486010 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.489454 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.493120 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.493338 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.499149 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.673981 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1210a44-81ea-4f31-a263-219a2b36b92e-scripts\") pod \"ceilometer-0\" (UID: \"e1210a44-81ea-4f31-a263-219a2b36b92e\") " pod="openstack/ceilometer-0" Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.674035 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1210a44-81ea-4f31-a263-219a2b36b92e-run-httpd\") pod \"ceilometer-0\" (UID: \"e1210a44-81ea-4f31-a263-219a2b36b92e\") " pod="openstack/ceilometer-0" Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.674070 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e1210a44-81ea-4f31-a263-219a2b36b92e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e1210a44-81ea-4f31-a263-219a2b36b92e\") " pod="openstack/ceilometer-0" Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.674134 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1210a44-81ea-4f31-a263-219a2b36b92e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e1210a44-81ea-4f31-a263-219a2b36b92e\") " pod="openstack/ceilometer-0" Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.674171 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1210a44-81ea-4f31-a263-219a2b36b92e-log-httpd\") pod \"ceilometer-0\" (UID: \"e1210a44-81ea-4f31-a263-219a2b36b92e\") " pod="openstack/ceilometer-0" Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.674212 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1210a44-81ea-4f31-a263-219a2b36b92e-config-data\") pod \"ceilometer-0\" (UID: \"e1210a44-81ea-4f31-a263-219a2b36b92e\") " pod="openstack/ceilometer-0" Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.675210 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kzgf\" (UniqueName: \"kubernetes.io/projected/e1210a44-81ea-4f31-a263-219a2b36b92e-kube-api-access-6kzgf\") pod \"ceilometer-0\" (UID: \"e1210a44-81ea-4f31-a263-219a2b36b92e\") " pod="openstack/ceilometer-0" Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.776812 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1210a44-81ea-4f31-a263-219a2b36b92e-scripts\") pod \"ceilometer-0\" (UID: \"e1210a44-81ea-4f31-a263-219a2b36b92e\") " pod="openstack/ceilometer-0" Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.776875 4708 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1210a44-81ea-4f31-a263-219a2b36b92e-run-httpd\") pod \"ceilometer-0\" (UID: \"e1210a44-81ea-4f31-a263-219a2b36b92e\") " pod="openstack/ceilometer-0" Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.776936 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e1210a44-81ea-4f31-a263-219a2b36b92e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e1210a44-81ea-4f31-a263-219a2b36b92e\") " pod="openstack/ceilometer-0" Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.776982 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1210a44-81ea-4f31-a263-219a2b36b92e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e1210a44-81ea-4f31-a263-219a2b36b92e\") " pod="openstack/ceilometer-0" Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.777020 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1210a44-81ea-4f31-a263-219a2b36b92e-log-httpd\") pod \"ceilometer-0\" (UID: \"e1210a44-81ea-4f31-a263-219a2b36b92e\") " pod="openstack/ceilometer-0" Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.777065 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1210a44-81ea-4f31-a263-219a2b36b92e-config-data\") pod \"ceilometer-0\" (UID: \"e1210a44-81ea-4f31-a263-219a2b36b92e\") " pod="openstack/ceilometer-0" Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.777109 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kzgf\" (UniqueName: \"kubernetes.io/projected/e1210a44-81ea-4f31-a263-219a2b36b92e-kube-api-access-6kzgf\") pod \"ceilometer-0\" (UID: \"e1210a44-81ea-4f31-a263-219a2b36b92e\") " pod="openstack/ceilometer-0" Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.779070 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1210a44-81ea-4f31-a263-219a2b36b92e-run-httpd\") pod \"ceilometer-0\" (UID: \"e1210a44-81ea-4f31-a263-219a2b36b92e\") " pod="openstack/ceilometer-0" Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.781166 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1210a44-81ea-4f31-a263-219a2b36b92e-log-httpd\") pod \"ceilometer-0\" (UID: \"e1210a44-81ea-4f31-a263-219a2b36b92e\") " pod="openstack/ceilometer-0" Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.790997 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1210a44-81ea-4f31-a263-219a2b36b92e-scripts\") pod \"ceilometer-0\" (UID: \"e1210a44-81ea-4f31-a263-219a2b36b92e\") " pod="openstack/ceilometer-0" Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.792064 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1210a44-81ea-4f31-a263-219a2b36b92e-config-data\") pod \"ceilometer-0\" (UID: \"e1210a44-81ea-4f31-a263-219a2b36b92e\") " pod="openstack/ceilometer-0" Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.802522 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-6kzgf\" (UniqueName: \"kubernetes.io/projected/e1210a44-81ea-4f31-a263-219a2b36b92e-kube-api-access-6kzgf\") pod \"ceilometer-0\" (UID: \"e1210a44-81ea-4f31-a263-219a2b36b92e\") " pod="openstack/ceilometer-0" Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.802716 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1210a44-81ea-4f31-a263-219a2b36b92e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e1210a44-81ea-4f31-a263-219a2b36b92e\") " pod="openstack/ceilometer-0" Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.809837 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e1210a44-81ea-4f31-a263-219a2b36b92e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e1210a44-81ea-4f31-a263-219a2b36b92e\") " pod="openstack/ceilometer-0" Feb 03 07:29:02 crc kubenswrapper[4708]: I0203 07:29:02.831414 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:29:04 crc kubenswrapper[4708]: I0203 07:29:04.115678 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47ef73e2-564e-47a6-9796-f61d5542d740" path="/var/lib/kubelet/pods/47ef73e2-564e-47a6-9796-f61d5542d740/volumes" Feb 03 07:29:04 crc kubenswrapper[4708]: I0203 07:29:04.557274 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Feb 03 07:29:04 crc kubenswrapper[4708]: I0203 07:29:04.876730 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 07:29:04 crc kubenswrapper[4708]: I0203 07:29:04.877277 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="23415315-630b-4b47-91ac-ac60c2af15bc" containerName="glance-log" containerID="cri-o://4949d4af0b3256c66c52e0346ea55d77c3aa7ced56092f943478e2fdd5947463" gracePeriod=30 Feb 03 07:29:04 crc kubenswrapper[4708]: I0203 07:29:04.877368 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="23415315-630b-4b47-91ac-ac60c2af15bc" containerName="glance-httpd" containerID="cri-o://5ef0e97126bf1548a5b4cf7e74d0655789449f0fec641413e3e0dfdeeff18b54" gracePeriod=30 Feb 03 07:29:05 crc kubenswrapper[4708]: I0203 07:29:05.253945 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:29:05 crc kubenswrapper[4708]: I0203 07:29:05.381037 4708 generic.go:334] "Generic (PLEG): container finished" podID="23415315-630b-4b47-91ac-ac60c2af15bc" containerID="4949d4af0b3256c66c52e0346ea55d77c3aa7ced56092f943478e2fdd5947463" exitCode=143 Feb 03 07:29:05 crc kubenswrapper[4708]: I0203 07:29:05.381078 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"23415315-630b-4b47-91ac-ac60c2af15bc","Type":"ContainerDied","Data":"4949d4af0b3256c66c52e0346ea55d77c3aa7ced56092f943478e2fdd5947463"} Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.331402 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-dkt4g"] Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.332564 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-dkt4g" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.347339 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-dkt4g"] Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.436379 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-cs5gv"] Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.437690 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-cs5gv" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.448179 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9k6b5\" (UniqueName: \"kubernetes.io/projected/6396b5c7-1019-4539-b518-3fa061f6e53a-kube-api-access-9k6b5\") pod \"nova-api-db-create-dkt4g\" (UID: \"6396b5c7-1019-4539-b518-3fa061f6e53a\") " pod="openstack/nova-api-db-create-dkt4g" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.448269 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6396b5c7-1019-4539-b518-3fa061f6e53a-operator-scripts\") pod \"nova-api-db-create-dkt4g\" (UID: \"6396b5c7-1019-4539-b518-3fa061f6e53a\") " pod="openstack/nova-api-db-create-dkt4g" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.450316 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-cs5gv"] Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.537985 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-84b5-account-create-update-mbh6v"] Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.539885 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-84b5-account-create-update-mbh6v" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.548147 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-tgz8m"] Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.549650 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-tgz8m" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.551046 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6396b5c7-1019-4539-b518-3fa061f6e53a-operator-scripts\") pod \"nova-api-db-create-dkt4g\" (UID: \"6396b5c7-1019-4539-b518-3fa061f6e53a\") " pod="openstack/nova-api-db-create-dkt4g" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.551177 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txgg2\" (UniqueName: \"kubernetes.io/projected/2f88611b-6078-4735-9ae1-8f2408ea7457-kube-api-access-txgg2\") pod \"nova-cell0-db-create-cs5gv\" (UID: \"2f88611b-6078-4735-9ae1-8f2408ea7457\") " pod="openstack/nova-cell0-db-create-cs5gv" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.551272 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9k6b5\" (UniqueName: \"kubernetes.io/projected/6396b5c7-1019-4539-b518-3fa061f6e53a-kube-api-access-9k6b5\") pod \"nova-api-db-create-dkt4g\" (UID: \"6396b5c7-1019-4539-b518-3fa061f6e53a\") " pod="openstack/nova-api-db-create-dkt4g" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.551530 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f88611b-6078-4735-9ae1-8f2408ea7457-operator-scripts\") pod \"nova-cell0-db-create-cs5gv\" (UID: \"2f88611b-6078-4735-9ae1-8f2408ea7457\") " pod="openstack/nova-cell0-db-create-cs5gv" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.552485 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6396b5c7-1019-4539-b518-3fa061f6e53a-operator-scripts\") pod \"nova-api-db-create-dkt4g\" (UID: \"6396b5c7-1019-4539-b518-3fa061f6e53a\") " pod="openstack/nova-api-db-create-dkt4g" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.556151 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.557342 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-tgz8m"] Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.568483 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-84b5-account-create-update-mbh6v"] Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.589320 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9k6b5\" (UniqueName: \"kubernetes.io/projected/6396b5c7-1019-4539-b518-3fa061f6e53a-kube-api-access-9k6b5\") pod \"nova-api-db-create-dkt4g\" (UID: \"6396b5c7-1019-4539-b518-3fa061f6e53a\") " pod="openstack/nova-api-db-create-dkt4g" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.652820 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txgg2\" (UniqueName: \"kubernetes.io/projected/2f88611b-6078-4735-9ae1-8f2408ea7457-kube-api-access-txgg2\") pod \"nova-cell0-db-create-cs5gv\" (UID: \"2f88611b-6078-4735-9ae1-8f2408ea7457\") " pod="openstack/nova-cell0-db-create-cs5gv" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.652886 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hg7rs\" (UniqueName: 
\"kubernetes.io/projected/4db39912-1ffe-48e4-b392-f993bbf6ee46-kube-api-access-hg7rs\") pod \"nova-cell1-db-create-tgz8m\" (UID: \"4db39912-1ffe-48e4-b392-f993bbf6ee46\") " pod="openstack/nova-cell1-db-create-tgz8m" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.652944 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3-operator-scripts\") pod \"nova-api-84b5-account-create-update-mbh6v\" (UID: \"87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3\") " pod="openstack/nova-api-84b5-account-create-update-mbh6v" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.652986 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f88611b-6078-4735-9ae1-8f2408ea7457-operator-scripts\") pod \"nova-cell0-db-create-cs5gv\" (UID: \"2f88611b-6078-4735-9ae1-8f2408ea7457\") " pod="openstack/nova-cell0-db-create-cs5gv" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.653020 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-px2qg\" (UniqueName: \"kubernetes.io/projected/87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3-kube-api-access-px2qg\") pod \"nova-api-84b5-account-create-update-mbh6v\" (UID: \"87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3\") " pod="openstack/nova-api-84b5-account-create-update-mbh6v" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.653050 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4db39912-1ffe-48e4-b392-f993bbf6ee46-operator-scripts\") pod \"nova-cell1-db-create-tgz8m\" (UID: \"4db39912-1ffe-48e4-b392-f993bbf6ee46\") " pod="openstack/nova-cell1-db-create-tgz8m" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.654120 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f88611b-6078-4735-9ae1-8f2408ea7457-operator-scripts\") pod \"nova-cell0-db-create-cs5gv\" (UID: \"2f88611b-6078-4735-9ae1-8f2408ea7457\") " pod="openstack/nova-cell0-db-create-cs5gv" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.654399 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-dkt4g" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.659686 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.671080 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="1f4fde2c-f8ce-4722-93ee-6ddc16b128f7" containerName="glance-log" containerID="cri-o://8d3298517ad9b8842840c7bc34dadc402448b09b2a75e3100078876011ed57d2" gracePeriod=30 Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.672107 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="1f4fde2c-f8ce-4722-93ee-6ddc16b128f7" containerName="glance-httpd" containerID="cri-o://cfcd0a24d7512c21f898ada8ee60c205fdc22386975fa6038833b779dca8ea6d" gracePeriod=30 Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.675296 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txgg2\" (UniqueName: \"kubernetes.io/projected/2f88611b-6078-4735-9ae1-8f2408ea7457-kube-api-access-txgg2\") pod \"nova-cell0-db-create-cs5gv\" (UID: \"2f88611b-6078-4735-9ae1-8f2408ea7457\") " pod="openstack/nova-cell0-db-create-cs5gv" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.749917 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-fc5cbdf64-524m4" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.756705 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3-operator-scripts\") pod \"nova-api-84b5-account-create-update-mbh6v\" (UID: \"87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3\") " pod="openstack/nova-api-84b5-account-create-update-mbh6v" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.756829 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-px2qg\" (UniqueName: \"kubernetes.io/projected/87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3-kube-api-access-px2qg\") pod \"nova-api-84b5-account-create-update-mbh6v\" (UID: \"87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3\") " pod="openstack/nova-api-84b5-account-create-update-mbh6v" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.756878 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4db39912-1ffe-48e4-b392-f993bbf6ee46-operator-scripts\") pod \"nova-cell1-db-create-tgz8m\" (UID: \"4db39912-1ffe-48e4-b392-f993bbf6ee46\") " pod="openstack/nova-cell1-db-create-tgz8m" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.756964 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hg7rs\" (UniqueName: \"kubernetes.io/projected/4db39912-1ffe-48e4-b392-f993bbf6ee46-kube-api-access-hg7rs\") pod \"nova-cell1-db-create-tgz8m\" (UID: \"4db39912-1ffe-48e4-b392-f993bbf6ee46\") " pod="openstack/nova-cell1-db-create-tgz8m" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.758073 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3-operator-scripts\") pod \"nova-api-84b5-account-create-update-mbh6v\" (UID: \"87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3\") " 
pod="openstack/nova-api-84b5-account-create-update-mbh6v" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.759257 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4db39912-1ffe-48e4-b392-f993bbf6ee46-operator-scripts\") pod \"nova-cell1-db-create-tgz8m\" (UID: \"4db39912-1ffe-48e4-b392-f993bbf6ee46\") " pod="openstack/nova-cell1-db-create-tgz8m" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.759896 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-cs5gv" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.765084 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-5965-account-create-update-wtxml"] Feb 03 07:29:06 crc kubenswrapper[4708]: E0203 07:29:06.765478 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d2f1707-b960-46ce-b412-6a16f8cc63c9" containerName="ironic-api-log" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.765490 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d2f1707-b960-46ce-b412-6a16f8cc63c9" containerName="ironic-api-log" Feb 03 07:29:06 crc kubenswrapper[4708]: E0203 07:29:06.765508 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d2f1707-b960-46ce-b412-6a16f8cc63c9" containerName="ironic-api" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.765514 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d2f1707-b960-46ce-b412-6a16f8cc63c9" containerName="ironic-api" Feb 03 07:29:06 crc kubenswrapper[4708]: E0203 07:29:06.765527 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d2f1707-b960-46ce-b412-6a16f8cc63c9" containerName="init" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.765533 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d2f1707-b960-46ce-b412-6a16f8cc63c9" containerName="init" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.765689 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d2f1707-b960-46ce-b412-6a16f8cc63c9" containerName="ironic-api" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.765706 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d2f1707-b960-46ce-b412-6a16f8cc63c9" containerName="ironic-api-log" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.766282 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-5965-account-create-update-wtxml" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.769517 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.779026 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hg7rs\" (UniqueName: \"kubernetes.io/projected/4db39912-1ffe-48e4-b392-f993bbf6ee46-kube-api-access-hg7rs\") pod \"nova-cell1-db-create-tgz8m\" (UID: \"4db39912-1ffe-48e4-b392-f993bbf6ee46\") " pod="openstack/nova-cell1-db-create-tgz8m" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.781077 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-5965-account-create-update-wtxml"] Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.796254 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-px2qg\" (UniqueName: \"kubernetes.io/projected/87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3-kube-api-access-px2qg\") pod \"nova-api-84b5-account-create-update-mbh6v\" (UID: \"87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3\") " pod="openstack/nova-api-84b5-account-create-update-mbh6v" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.858194 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/5d2f1707-b960-46ce-b412-6a16f8cc63c9-config-data-merged\") pod \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\" (UID: \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.858253 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-94489\" (UniqueName: \"kubernetes.io/projected/5d2f1707-b960-46ce-b412-6a16f8cc63c9-kube-api-access-94489\") pod \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\" (UID: \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.858292 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d2f1707-b960-46ce-b412-6a16f8cc63c9-scripts\") pod \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\" (UID: \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.858465 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d2f1707-b960-46ce-b412-6a16f8cc63c9-logs\") pod \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\" (UID: \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.858537 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/5d2f1707-b960-46ce-b412-6a16f8cc63c9-etc-podinfo\") pod \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\" (UID: \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.858559 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d2f1707-b960-46ce-b412-6a16f8cc63c9-config-data\") pod \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\" (UID: \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.858590 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/5d2f1707-b960-46ce-b412-6a16f8cc63c9-combined-ca-bundle\") pod \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\" (UID: \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.858610 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5d2f1707-b960-46ce-b412-6a16f8cc63c9-config-data-custom\") pod \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\" (UID: \"5d2f1707-b960-46ce-b412-6a16f8cc63c9\") " Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.858920 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d2f1707-b960-46ce-b412-6a16f8cc63c9-config-data-merged" (OuterVolumeSpecName: "config-data-merged") pod "5d2f1707-b960-46ce-b412-6a16f8cc63c9" (UID: "5d2f1707-b960-46ce-b412-6a16f8cc63c9"). InnerVolumeSpecName "config-data-merged". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.858930 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bfkk\" (UniqueName: \"kubernetes.io/projected/b1abbf4d-806b-40fa-9e1f-b415c5f8488e-kube-api-access-8bfkk\") pod \"nova-cell0-5965-account-create-update-wtxml\" (UID: \"b1abbf4d-806b-40fa-9e1f-b415c5f8488e\") " pod="openstack/nova-cell0-5965-account-create-update-wtxml" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.859044 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1abbf4d-806b-40fa-9e1f-b415c5f8488e-operator-scripts\") pod \"nova-cell0-5965-account-create-update-wtxml\" (UID: \"b1abbf4d-806b-40fa-9e1f-b415c5f8488e\") " pod="openstack/nova-cell0-5965-account-create-update-wtxml" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.859361 4708 reconciler_common.go:293] "Volume detached for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/5d2f1707-b960-46ce-b412-6a16f8cc63c9-config-data-merged\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.860671 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d2f1707-b960-46ce-b412-6a16f8cc63c9-logs" (OuterVolumeSpecName: "logs") pod "5d2f1707-b960-46ce-b412-6a16f8cc63c9" (UID: "5d2f1707-b960-46ce-b412-6a16f8cc63c9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.862643 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d2f1707-b960-46ce-b412-6a16f8cc63c9-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "5d2f1707-b960-46ce-b412-6a16f8cc63c9" (UID: "5d2f1707-b960-46ce-b412-6a16f8cc63c9"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.863486 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/5d2f1707-b960-46ce-b412-6a16f8cc63c9-etc-podinfo" (OuterVolumeSpecName: "etc-podinfo") pod "5d2f1707-b960-46ce-b412-6a16f8cc63c9" (UID: "5d2f1707-b960-46ce-b412-6a16f8cc63c9"). InnerVolumeSpecName "etc-podinfo". 
PluginName "kubernetes.io/downward-api", VolumeGidValue "" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.864895 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d2f1707-b960-46ce-b412-6a16f8cc63c9-scripts" (OuterVolumeSpecName: "scripts") pod "5d2f1707-b960-46ce-b412-6a16f8cc63c9" (UID: "5d2f1707-b960-46ce-b412-6a16f8cc63c9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.867776 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d2f1707-b960-46ce-b412-6a16f8cc63c9-kube-api-access-94489" (OuterVolumeSpecName: "kube-api-access-94489") pod "5d2f1707-b960-46ce-b412-6a16f8cc63c9" (UID: "5d2f1707-b960-46ce-b412-6a16f8cc63c9"). InnerVolumeSpecName "kube-api-access-94489". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.870752 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-85b58fb76c-jldbq" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.875406 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-85b58fb76c-jldbq" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.892950 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d2f1707-b960-46ce-b412-6a16f8cc63c9-config-data" (OuterVolumeSpecName: "config-data") pod "5d2f1707-b960-46ce-b412-6a16f8cc63c9" (UID: "5d2f1707-b960-46ce-b412-6a16f8cc63c9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.937911 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d2f1707-b960-46ce-b412-6a16f8cc63c9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5d2f1707-b960-46ce-b412-6a16f8cc63c9" (UID: "5d2f1707-b960-46ce-b412-6a16f8cc63c9"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.960588 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bfkk\" (UniqueName: \"kubernetes.io/projected/b1abbf4d-806b-40fa-9e1f-b415c5f8488e-kube-api-access-8bfkk\") pod \"nova-cell0-5965-account-create-update-wtxml\" (UID: \"b1abbf4d-806b-40fa-9e1f-b415c5f8488e\") " pod="openstack/nova-cell0-5965-account-create-update-wtxml" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.960634 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1abbf4d-806b-40fa-9e1f-b415c5f8488e-operator-scripts\") pod \"nova-cell0-5965-account-create-update-wtxml\" (UID: \"b1abbf4d-806b-40fa-9e1f-b415c5f8488e\") " pod="openstack/nova-cell0-5965-account-create-update-wtxml" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.960750 4708 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d2f1707-b960-46ce-b412-6a16f8cc63c9-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.960761 4708 reconciler_common.go:293] "Volume detached for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/5d2f1707-b960-46ce-b412-6a16f8cc63c9-etc-podinfo\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.960772 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d2f1707-b960-46ce-b412-6a16f8cc63c9-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.960781 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d2f1707-b960-46ce-b412-6a16f8cc63c9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.960808 4708 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5d2f1707-b960-46ce-b412-6a16f8cc63c9-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.960819 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-94489\" (UniqueName: \"kubernetes.io/projected/5d2f1707-b960-46ce-b412-6a16f8cc63c9-kube-api-access-94489\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.960828 4708 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d2f1707-b960-46ce-b412-6a16f8cc63c9-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.965095 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1abbf4d-806b-40fa-9e1f-b415c5f8488e-operator-scripts\") pod \"nova-cell0-5965-account-create-update-wtxml\" (UID: \"b1abbf4d-806b-40fa-9e1f-b415c5f8488e\") " pod="openstack/nova-cell0-5965-account-create-update-wtxml" Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.966103 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-dd77-account-create-update-8bdgp"] Feb 03 07:29:06 crc kubenswrapper[4708]: E0203 07:29:06.966501 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d2f1707-b960-46ce-b412-6a16f8cc63c9" containerName="ironic-api" Feb 
Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.966514 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d2f1707-b960-46ce-b412-6a16f8cc63c9" containerName="ironic-api"
Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.966700 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d2f1707-b960-46ce-b412-6a16f8cc63c9" containerName="ironic-api"
Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.967343 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-dd77-account-create-update-8bdgp"
Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.969650 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret"
Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.976941 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-84b5-account-create-update-mbh6v"
Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.981748 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-dd77-account-create-update-8bdgp"]
Feb 03 07:29:06 crc kubenswrapper[4708]: I0203 07:29:06.991749 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bfkk\" (UniqueName: \"kubernetes.io/projected/b1abbf4d-806b-40fa-9e1f-b415c5f8488e-kube-api-access-8bfkk\") pod \"nova-cell0-5965-account-create-update-wtxml\" (UID: \"b1abbf4d-806b-40fa-9e1f-b415c5f8488e\") " pod="openstack/nova-cell0-5965-account-create-update-wtxml"
Feb 03 07:29:07 crc kubenswrapper[4708]: I0203 07:29:07.038916 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-tgz8m"
Feb 03 07:29:07 crc kubenswrapper[4708]: I0203 07:29:07.062507 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7c9ad1ec-0782-4fb8-a838-d44194d33047-operator-scripts\") pod \"nova-cell1-dd77-account-create-update-8bdgp\" (UID: \"7c9ad1ec-0782-4fb8-a838-d44194d33047\") " pod="openstack/nova-cell1-dd77-account-create-update-8bdgp"
Feb 03 07:29:07 crc kubenswrapper[4708]: I0203 07:29:07.062712 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4r67m\" (UniqueName: \"kubernetes.io/projected/7c9ad1ec-0782-4fb8-a838-d44194d33047-kube-api-access-4r67m\") pod \"nova-cell1-dd77-account-create-update-8bdgp\" (UID: \"7c9ad1ec-0782-4fb8-a838-d44194d33047\") " pod="openstack/nova-cell1-dd77-account-create-update-8bdgp"
Need to start a new one" pod="openstack/nova-cell0-5965-account-create-update-wtxml" Feb 03 07:29:07 crc kubenswrapper[4708]: I0203 07:29:07.164906 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7c9ad1ec-0782-4fb8-a838-d44194d33047-operator-scripts\") pod \"nova-cell1-dd77-account-create-update-8bdgp\" (UID: \"7c9ad1ec-0782-4fb8-a838-d44194d33047\") " pod="openstack/nova-cell1-dd77-account-create-update-8bdgp" Feb 03 07:29:07 crc kubenswrapper[4708]: I0203 07:29:07.165064 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4r67m\" (UniqueName: \"kubernetes.io/projected/7c9ad1ec-0782-4fb8-a838-d44194d33047-kube-api-access-4r67m\") pod \"nova-cell1-dd77-account-create-update-8bdgp\" (UID: \"7c9ad1ec-0782-4fb8-a838-d44194d33047\") " pod="openstack/nova-cell1-dd77-account-create-update-8bdgp" Feb 03 07:29:07 crc kubenswrapper[4708]: I0203 07:29:07.166139 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7c9ad1ec-0782-4fb8-a838-d44194d33047-operator-scripts\") pod \"nova-cell1-dd77-account-create-update-8bdgp\" (UID: \"7c9ad1ec-0782-4fb8-a838-d44194d33047\") " pod="openstack/nova-cell1-dd77-account-create-update-8bdgp" Feb 03 07:29:07 crc kubenswrapper[4708]: I0203 07:29:07.185833 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4r67m\" (UniqueName: \"kubernetes.io/projected/7c9ad1ec-0782-4fb8-a838-d44194d33047-kube-api-access-4r67m\") pod \"nova-cell1-dd77-account-create-update-8bdgp\" (UID: \"7c9ad1ec-0782-4fb8-a838-d44194d33047\") " pod="openstack/nova-cell1-dd77-account-create-update-8bdgp" Feb 03 07:29:07 crc kubenswrapper[4708]: I0203 07:29:07.292932 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-dd77-account-create-update-8bdgp" Feb 03 07:29:07 crc kubenswrapper[4708]: I0203 07:29:07.399874 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-fc5cbdf64-524m4" event={"ID":"5d2f1707-b960-46ce-b412-6a16f8cc63c9","Type":"ContainerDied","Data":"b52dab6d7b406f0d72c04fbc3f85b910abdb79a2cf75e4dc633945e70777452c"} Feb 03 07:29:07 crc kubenswrapper[4708]: I0203 07:29:07.399896 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-fc5cbdf64-524m4" Feb 03 07:29:07 crc kubenswrapper[4708]: I0203 07:29:07.399922 4708 scope.go:117] "RemoveContainer" containerID="413092028f6bbc984f610a50fb38670b9647f6df58504ac8acb890d90c2a1f1a" Feb 03 07:29:07 crc kubenswrapper[4708]: I0203 07:29:07.402897 4708 generic.go:334] "Generic (PLEG): container finished" podID="1f4fde2c-f8ce-4722-93ee-6ddc16b128f7" containerID="8d3298517ad9b8842840c7bc34dadc402448b09b2a75e3100078876011ed57d2" exitCode=143 Feb 03 07:29:07 crc kubenswrapper[4708]: I0203 07:29:07.403137 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7","Type":"ContainerDied","Data":"8d3298517ad9b8842840c7bc34dadc402448b09b2a75e3100078876011ed57d2"} Feb 03 07:29:07 crc kubenswrapper[4708]: I0203 07:29:07.429738 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ironic-fc5cbdf64-524m4"] Feb 03 07:29:07 crc kubenswrapper[4708]: I0203 07:29:07.436875 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ironic-fc5cbdf64-524m4"] Feb 03 07:29:08 crc kubenswrapper[4708]: I0203 07:29:08.106210 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d2f1707-b960-46ce-b412-6a16f8cc63c9" path="/var/lib/kubelet/pods/5d2f1707-b960-46ce-b412-6a16f8cc63c9/volumes" Feb 03 07:29:08 crc kubenswrapper[4708]: I0203 07:29:08.415053 4708 generic.go:334] "Generic (PLEG): container finished" podID="23415315-630b-4b47-91ac-ac60c2af15bc" containerID="5ef0e97126bf1548a5b4cf7e74d0655789449f0fec641413e3e0dfdeeff18b54" exitCode=0 Feb 03 07:29:08 crc kubenswrapper[4708]: I0203 07:29:08.415089 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"23415315-630b-4b47-91ac-ac60c2af15bc","Type":"ContainerDied","Data":"5ef0e97126bf1548a5b4cf7e74d0655789449f0fec641413e3e0dfdeeff18b54"} Feb 03 07:29:10 crc kubenswrapper[4708]: I0203 07:29:10.440738 4708 generic.go:334] "Generic (PLEG): container finished" podID="1f4fde2c-f8ce-4722-93ee-6ddc16b128f7" containerID="cfcd0a24d7512c21f898ada8ee60c205fdc22386975fa6038833b779dca8ea6d" exitCode=0 Feb 03 07:29:10 crc kubenswrapper[4708]: I0203 07:29:10.440831 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7","Type":"ContainerDied","Data":"cfcd0a24d7512c21f898ada8ee60c205fdc22386975fa6038833b779dca8ea6d"} Feb 03 07:29:12 crc kubenswrapper[4708]: I0203 07:29:12.113434 4708 scope.go:117] "RemoveContainer" containerID="8398b5ba17e634616cc84e36f75f9c35514c7e026577d62b4c96af19462ff71a" Feb 03 07:29:12 crc kubenswrapper[4708]: E0203 07:29:12.694231 4708 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified" Feb 03 07:29:12 crc kubenswrapper[4708]: E0203 07:29:12.694448 4708 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:openstackclient,Image:quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified,Command:[/bin/sleep],Args:[infinity],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n88h576h679h65fh64h657hd9h65h7fh64dh56bhcfh65h66ch59hf4h5c4h65bh64ch64fh59bhf4h644h6bh649h9ch9bh58bh5bfh6fhcdhf9q,ValueFrom:nil,},EnvVar{Name:OS_CLOUD,Value:default,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_HOST,Value:metric-storage-prometheus.openstack.svc,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_PORT,Value:9090,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:openstack-config,ReadOnly:false,MountPath:/home/cloud-admin/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/home/cloud-admin/.config/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/home/cloud-admin/cloudrc,SubPath:cloudrc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-b9t44,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42401,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:*42401,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstackclient_openstack(db624ad8-1c0f-4100-b3a2-4c80e02c1b03): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 03 07:29:12 crc kubenswrapper[4708]: E0203 07:29:12.695635 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstackclient\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstackclient" podUID="db624ad8-1c0f-4100-b3a2-4c80e02c1b03" Feb 03 07:29:13 crc kubenswrapper[4708]: I0203 07:29:13.075221 4708 scope.go:117] "RemoveContainer" containerID="30454f427b56f4c4f9380d560f1135202646353aa4998b0560eb942399e78727" Feb 03 07:29:13 crc kubenswrapper[4708]: I0203 07:29:13.433011 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-dkt4g"] Feb 03 07:29:13 crc kubenswrapper[4708]: E0203 07:29:13.479875 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstackclient\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified\\\"\"" pod="openstack/openstackclient" podUID="db624ad8-1c0f-4100-b3a2-4c80e02c1b03" Feb 03 07:29:13 crc kubenswrapper[4708]: W0203 
Feb 03 07:29:13 crc kubenswrapper[4708]: W0203 07:29:13.979142 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6396b5c7_1019_4539_b518_3fa061f6e53a.slice/crio-1dc6c88bdef291a5f096c716c599f4863e222463b5d2ecd08e46ba33fac7efe0 WatchSource:0}: Error finding container 1dc6c88bdef291a5f096c716c599f4863e222463b5d2ecd08e46ba33fac7efe0: Status 404 returned error can't find the container with id 1dc6c88bdef291a5f096c716c599f4863e222463b5d2ecd08e46ba33fac7efe0
Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.016470 4708 scope.go:117] "RemoveContainer" containerID="7112781cecec0ce36b5ad7ca5b033e477f7619ca067c4dc131cd12774f598f86"
Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.089940 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.231118 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ccsld\" (UniqueName: \"kubernetes.io/projected/23415315-630b-4b47-91ac-ac60c2af15bc-kube-api-access-ccsld\") pod \"23415315-630b-4b47-91ac-ac60c2af15bc\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") "
Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.231760 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"23415315-630b-4b47-91ac-ac60c2af15bc\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") "
Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.236193 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23415315-630b-4b47-91ac-ac60c2af15bc-kube-api-access-ccsld" (OuterVolumeSpecName: "kube-api-access-ccsld") pod "23415315-630b-4b47-91ac-ac60c2af15bc" (UID: "23415315-630b-4b47-91ac-ac60c2af15bc"). InnerVolumeSpecName "kube-api-access-ccsld". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.240014 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23415315-630b-4b47-91ac-ac60c2af15bc-config-data\") pod \"23415315-630b-4b47-91ac-ac60c2af15bc\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") " Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.240065 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/23415315-630b-4b47-91ac-ac60c2af15bc-httpd-run\") pod \"23415315-630b-4b47-91ac-ac60c2af15bc\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") " Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.240123 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23415315-630b-4b47-91ac-ac60c2af15bc-logs\") pod \"23415315-630b-4b47-91ac-ac60c2af15bc\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") " Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.240162 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23415315-630b-4b47-91ac-ac60c2af15bc-combined-ca-bundle\") pod \"23415315-630b-4b47-91ac-ac60c2af15bc\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") " Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.240221 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/23415315-630b-4b47-91ac-ac60c2af15bc-public-tls-certs\") pod \"23415315-630b-4b47-91ac-ac60c2af15bc\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") " Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.240280 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23415315-630b-4b47-91ac-ac60c2af15bc-scripts\") pod \"23415315-630b-4b47-91ac-ac60c2af15bc\" (UID: \"23415315-630b-4b47-91ac-ac60c2af15bc\") " Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.241573 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23415315-630b-4b47-91ac-ac60c2af15bc-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "23415315-630b-4b47-91ac-ac60c2af15bc" (UID: "23415315-630b-4b47-91ac-ac60c2af15bc"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.241734 4708 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/23415315-630b-4b47-91ac-ac60c2af15bc-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.241757 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ccsld\" (UniqueName: \"kubernetes.io/projected/23415315-630b-4b47-91ac-ac60c2af15bc-kube-api-access-ccsld\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.242155 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "23415315-630b-4b47-91ac-ac60c2af15bc" (UID: "23415315-630b-4b47-91ac-ac60c2af15bc"). InnerVolumeSpecName "local-storage12-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.243095 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23415315-630b-4b47-91ac-ac60c2af15bc-logs" (OuterVolumeSpecName: "logs") pod "23415315-630b-4b47-91ac-ac60c2af15bc" (UID: "23415315-630b-4b47-91ac-ac60c2af15bc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.248164 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23415315-630b-4b47-91ac-ac60c2af15bc-scripts" (OuterVolumeSpecName: "scripts") pod "23415315-630b-4b47-91ac-ac60c2af15bc" (UID: "23415315-630b-4b47-91ac-ac60c2af15bc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.276307 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.316923 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23415315-630b-4b47-91ac-ac60c2af15bc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "23415315-630b-4b47-91ac-ac60c2af15bc" (UID: "23415315-630b-4b47-91ac-ac60c2af15bc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.331272 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23415315-630b-4b47-91ac-ac60c2af15bc-config-data" (OuterVolumeSpecName: "config-data") pod "23415315-630b-4b47-91ac-ac60c2af15bc" (UID: "23415315-630b-4b47-91ac-ac60c2af15bc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.343124 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23415315-630b-4b47-91ac-ac60c2af15bc-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.343161 4708 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23415315-630b-4b47-91ac-ac60c2af15bc-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.343175 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23415315-630b-4b47-91ac-ac60c2af15bc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.343188 4708 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23415315-630b-4b47-91ac-ac60c2af15bc-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.343213 4708 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.363809 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23415315-630b-4b47-91ac-ac60c2af15bc-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "23415315-630b-4b47-91ac-ac60c2af15bc" (UID: "23415315-630b-4b47-91ac-ac60c2af15bc"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.404023 4708 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.448835 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-config-data\") pod \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.448959 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-logs\") pod \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.449019 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-httpd-run\") pod \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.449039 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-internal-tls-certs\") pod \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.449161 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-9jdch\" (UniqueName: \"kubernetes.io/projected/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-kube-api-access-9jdch\") pod \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.449192 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.449224 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-scripts\") pod \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.449263 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-combined-ca-bundle\") pod \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\" (UID: \"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7\") " Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.449732 4708 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/23415315-630b-4b47-91ac-ac60c2af15bc-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.449752 4708 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.450417 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "1f4fde2c-f8ce-4722-93ee-6ddc16b128f7" (UID: "1f4fde2c-f8ce-4722-93ee-6ddc16b128f7"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.450478 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-logs" (OuterVolumeSpecName: "logs") pod "1f4fde2c-f8ce-4722-93ee-6ddc16b128f7" (UID: "1f4fde2c-f8ce-4722-93ee-6ddc16b128f7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.461643 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "glance") pod "1f4fde2c-f8ce-4722-93ee-6ddc16b128f7" (UID: "1f4fde2c-f8ce-4722-93ee-6ddc16b128f7"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.461821 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-scripts" (OuterVolumeSpecName: "scripts") pod "1f4fde2c-f8ce-4722-93ee-6ddc16b128f7" (UID: "1f4fde2c-f8ce-4722-93ee-6ddc16b128f7"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.469575 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-kube-api-access-9jdch" (OuterVolumeSpecName: "kube-api-access-9jdch") pod "1f4fde2c-f8ce-4722-93ee-6ddc16b128f7" (UID: "1f4fde2c-f8ce-4722-93ee-6ddc16b128f7"). InnerVolumeSpecName "kube-api-access-9jdch". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.497775 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-dd77-account-create-update-8bdgp"] Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.508501 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1f4fde2c-f8ce-4722-93ee-6ddc16b128f7" (UID: "1f4fde2c-f8ce-4722-93ee-6ddc16b128f7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.508719 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.515084 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"23415315-630b-4b47-91ac-ac60c2af15bc","Type":"ContainerDied","Data":"2d1ec41ac80b27f5f753cc114b047e247fe73df3e8a34d7eb9c03c1dfe840c5d"} Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.515111 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.515145 4708 scope.go:117] "RemoveContainer" containerID="5ef0e97126bf1548a5b4cf7e74d0655789449f0fec641413e3e0dfdeeff18b54" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.527691 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-dkt4g" event={"ID":"6396b5c7-1019-4539-b518-3fa061f6e53a","Type":"ContainerStarted","Data":"1dc6c88bdef291a5f096c716c599f4863e222463b5d2ecd08e46ba33fac7efe0"} Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.534716 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"1f4fde2c-f8ce-4722-93ee-6ddc16b128f7","Type":"ContainerDied","Data":"107e16d6a66b666f0cd66dbe9e26a204b3348d3c17dc9183cbdb94cfea5bd113"} Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.534876 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.571517 4708 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.571876 4708 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.575786 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jdch\" (UniqueName: \"kubernetes.io/projected/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-kube-api-access-9jdch\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.575859 4708 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.575889 4708 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.575903 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.642003 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-dkt4g" podStartSLOduration=8.641983462 podStartE2EDuration="8.641983462s" podCreationTimestamp="2026-02-03 07:29:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:29:14.549034754 +0000 UTC m=+1133.530981561" watchObservedRunningTime="2026-02-03 07:29:14.641983462 +0000 UTC m=+1133.623930269" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.655489 4708 scope.go:117] "RemoveContainer" containerID="4949d4af0b3256c66c52e0346ea55d77c3aa7ced56092f943478e2fdd5947463" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.655690 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.665708 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.677513 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 07:29:14 crc kubenswrapper[4708]: E0203 07:29:14.678015 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23415315-630b-4b47-91ac-ac60c2af15bc" containerName="glance-httpd" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.678030 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="23415315-630b-4b47-91ac-ac60c2af15bc" containerName="glance-httpd" Feb 03 07:29:14 crc kubenswrapper[4708]: E0203 07:29:14.678051 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f4fde2c-f8ce-4722-93ee-6ddc16b128f7" containerName="glance-httpd" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.678058 4708 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="1f4fde2c-f8ce-4722-93ee-6ddc16b128f7" containerName="glance-httpd" Feb 03 07:29:14 crc kubenswrapper[4708]: E0203 07:29:14.678072 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23415315-630b-4b47-91ac-ac60c2af15bc" containerName="glance-log" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.678082 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="23415315-630b-4b47-91ac-ac60c2af15bc" containerName="glance-log" Feb 03 07:29:14 crc kubenswrapper[4708]: E0203 07:29:14.678099 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f4fde2c-f8ce-4722-93ee-6ddc16b128f7" containerName="glance-log" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.678106 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f4fde2c-f8ce-4722-93ee-6ddc16b128f7" containerName="glance-log" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.678308 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f4fde2c-f8ce-4722-93ee-6ddc16b128f7" containerName="glance-log" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.678324 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f4fde2c-f8ce-4722-93ee-6ddc16b128f7" containerName="glance-httpd" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.678338 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="23415315-630b-4b47-91ac-ac60c2af15bc" containerName="glance-log" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.678350 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="23415315-630b-4b47-91ac-ac60c2af15bc" containerName="glance-httpd" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.679294 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.682005 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.682028 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.699821 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.706306 4708 scope.go:117] "RemoveContainer" containerID="cfcd0a24d7512c21f898ada8ee60c205fdc22386975fa6038833b779dca8ea6d" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.729748 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-tgz8m"] Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.729829 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-config-data" (OuterVolumeSpecName: "config-data") pod "1f4fde2c-f8ce-4722-93ee-6ddc16b128f7" (UID: "1f4fde2c-f8ce-4722-93ee-6ddc16b128f7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:29:14 crc kubenswrapper[4708]: W0203 07:29:14.739881 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4db39912_1ffe_48e4_b392_f993bbf6ee46.slice/crio-70e7449b051b9fd5bca33dfd9d518a3b53dc19d96800405171c49aa45d2ba541 WatchSource:0}: Error finding container 70e7449b051b9fd5bca33dfd9d518a3b53dc19d96800405171c49aa45d2ba541: Status 404 returned error can't find the container with id 70e7449b051b9fd5bca33dfd9d518a3b53dc19d96800405171c49aa45d2ba541 Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.748642 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-84b5-account-create-update-mbh6v"] Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.778414 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"a1cfcb85-5e57-43d2-8255-4be0c18d60f0\") " pod="openstack/glance-default-external-api-0" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.778482 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1cfcb85-5e57-43d2-8255-4be0c18d60f0-config-data\") pod \"glance-default-external-api-0\" (UID: \"a1cfcb85-5e57-43d2-8255-4be0c18d60f0\") " pod="openstack/glance-default-external-api-0" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.778527 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2n4f\" (UniqueName: \"kubernetes.io/projected/a1cfcb85-5e57-43d2-8255-4be0c18d60f0-kube-api-access-n2n4f\") pod \"glance-default-external-api-0\" (UID: \"a1cfcb85-5e57-43d2-8255-4be0c18d60f0\") " pod="openstack/glance-default-external-api-0" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.778567 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1cfcb85-5e57-43d2-8255-4be0c18d60f0-logs\") pod \"glance-default-external-api-0\" (UID: \"a1cfcb85-5e57-43d2-8255-4be0c18d60f0\") " pod="openstack/glance-default-external-api-0" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.778593 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1cfcb85-5e57-43d2-8255-4be0c18d60f0-scripts\") pod \"glance-default-external-api-0\" (UID: \"a1cfcb85-5e57-43d2-8255-4be0c18d60f0\") " pod="openstack/glance-default-external-api-0" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.778648 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1cfcb85-5e57-43d2-8255-4be0c18d60f0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a1cfcb85-5e57-43d2-8255-4be0c18d60f0\") " pod="openstack/glance-default-external-api-0" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.778825 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a1cfcb85-5e57-43d2-8255-4be0c18d60f0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a1cfcb85-5e57-43d2-8255-4be0c18d60f0\") " 
pod="openstack/glance-default-external-api-0" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.778885 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1cfcb85-5e57-43d2-8255-4be0c18d60f0-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a1cfcb85-5e57-43d2-8255-4be0c18d60f0\") " pod="openstack/glance-default-external-api-0" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.778957 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.834647 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "1f4fde2c-f8ce-4722-93ee-6ddc16b128f7" (UID: "1f4fde2c-f8ce-4722-93ee-6ddc16b128f7"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.853766 4708 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.880500 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1cfcb85-5e57-43d2-8255-4be0c18d60f0-logs\") pod \"glance-default-external-api-0\" (UID: \"a1cfcb85-5e57-43d2-8255-4be0c18d60f0\") " pod="openstack/glance-default-external-api-0" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.880544 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1cfcb85-5e57-43d2-8255-4be0c18d60f0-scripts\") pod \"glance-default-external-api-0\" (UID: \"a1cfcb85-5e57-43d2-8255-4be0c18d60f0\") " pod="openstack/glance-default-external-api-0" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.880587 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1cfcb85-5e57-43d2-8255-4be0c18d60f0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a1cfcb85-5e57-43d2-8255-4be0c18d60f0\") " pod="openstack/glance-default-external-api-0" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.880666 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a1cfcb85-5e57-43d2-8255-4be0c18d60f0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a1cfcb85-5e57-43d2-8255-4be0c18d60f0\") " pod="openstack/glance-default-external-api-0" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.880711 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1cfcb85-5e57-43d2-8255-4be0c18d60f0-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a1cfcb85-5e57-43d2-8255-4be0c18d60f0\") " pod="openstack/glance-default-external-api-0" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.880736 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod 
\"glance-default-external-api-0\" (UID: \"a1cfcb85-5e57-43d2-8255-4be0c18d60f0\") " pod="openstack/glance-default-external-api-0" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.880764 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1cfcb85-5e57-43d2-8255-4be0c18d60f0-config-data\") pod \"glance-default-external-api-0\" (UID: \"a1cfcb85-5e57-43d2-8255-4be0c18d60f0\") " pod="openstack/glance-default-external-api-0" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.880818 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2n4f\" (UniqueName: \"kubernetes.io/projected/a1cfcb85-5e57-43d2-8255-4be0c18d60f0-kube-api-access-n2n4f\") pod \"glance-default-external-api-0\" (UID: \"a1cfcb85-5e57-43d2-8255-4be0c18d60f0\") " pod="openstack/glance-default-external-api-0" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.880864 4708 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.880879 4708 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.881189 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1cfcb85-5e57-43d2-8255-4be0c18d60f0-logs\") pod \"glance-default-external-api-0\" (UID: \"a1cfcb85-5e57-43d2-8255-4be0c18d60f0\") " pod="openstack/glance-default-external-api-0" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.881705 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a1cfcb85-5e57-43d2-8255-4be0c18d60f0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a1cfcb85-5e57-43d2-8255-4be0c18d60f0\") " pod="openstack/glance-default-external-api-0" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.881960 4708 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"a1cfcb85-5e57-43d2-8255-4be0c18d60f0\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-external-api-0" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.900343 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-cs5gv"] Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.904392 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1cfcb85-5e57-43d2-8255-4be0c18d60f0-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a1cfcb85-5e57-43d2-8255-4be0c18d60f0\") " pod="openstack/glance-default-external-api-0" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.904686 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1cfcb85-5e57-43d2-8255-4be0c18d60f0-scripts\") pod \"glance-default-external-api-0\" (UID: \"a1cfcb85-5e57-43d2-8255-4be0c18d60f0\") " pod="openstack/glance-default-external-api-0" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.905111 
4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2n4f\" (UniqueName: \"kubernetes.io/projected/a1cfcb85-5e57-43d2-8255-4be0c18d60f0-kube-api-access-n2n4f\") pod \"glance-default-external-api-0\" (UID: \"a1cfcb85-5e57-43d2-8255-4be0c18d60f0\") " pod="openstack/glance-default-external-api-0" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.907387 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1cfcb85-5e57-43d2-8255-4be0c18d60f0-config-data\") pod \"glance-default-external-api-0\" (UID: \"a1cfcb85-5e57-43d2-8255-4be0c18d60f0\") " pod="openstack/glance-default-external-api-0" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.907461 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1cfcb85-5e57-43d2-8255-4be0c18d60f0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a1cfcb85-5e57-43d2-8255-4be0c18d60f0\") " pod="openstack/glance-default-external-api-0" Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.920758 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-5965-account-create-update-wtxml"] Feb 03 07:29:14 crc kubenswrapper[4708]: W0203 07:29:14.935181 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb1abbf4d_806b_40fa_9e1f_b415c5f8488e.slice/crio-2e1140d02376d6e62464a5e8ad68f549de6305ecea3fc929ec07158f895be485 WatchSource:0}: Error finding container 2e1140d02376d6e62464a5e8ad68f549de6305ecea3fc929ec07158f895be485: Status 404 returned error can't find the container with id 2e1140d02376d6e62464a5e8ad68f549de6305ecea3fc929ec07158f895be485 Feb 03 07:29:14 crc kubenswrapper[4708]: I0203 07:29:14.983818 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"a1cfcb85-5e57-43d2-8255-4be0c18d60f0\") " pod="openstack/glance-default-external-api-0" Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.184834 4708 scope.go:117] "RemoveContainer" containerID="8d3298517ad9b8842840c7bc34dadc402448b09b2a75e3100078876011ed57d2" Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.360773 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.401195 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.413184 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.447739 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.450498 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.453720 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.459026 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.478947 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.547544 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-neutron-agent-95b7948fb-x2nkv" event={"ID":"aeb72dfd-3f7b-41fa-882f-3290c463fcbe","Type":"ContainerStarted","Data":"f66668937414dd96b5c7eb323040bda3ed21a5d328f2ad84fce72edb953e996d"} Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.548756 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ironic-neutron-agent-95b7948fb-x2nkv" Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.550070 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-conductor-0" event={"ID":"361821ae-c957-4e31-bb9b-6d659aaceec4","Type":"ContainerStarted","Data":"59cc755623c02c8aa340f9d62fb0fb321384a59e60058aa76f639241ad1ecbe5"} Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.574160 4708 generic.go:334] "Generic (PLEG): container finished" podID="6396b5c7-1019-4539-b518-3fa061f6e53a" containerID="974f0cde8509f7cd3fc6bb2cfd2511f059acd4bf5536f8d8a03fab4746d7d185" exitCode=0 Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.574262 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-dkt4g" event={"ID":"6396b5c7-1019-4539-b518-3fa061f6e53a","Type":"ContainerDied","Data":"974f0cde8509f7cd3fc6bb2cfd2511f059acd4bf5536f8d8a03fab4746d7d185"} Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.575949 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1210a44-81ea-4f31-a263-219a2b36b92e","Type":"ContainerStarted","Data":"d04547aedb663d66e55e398710964da340c260985640ff7918117a9927fa7bbc"} Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.594749 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-cs5gv" event={"ID":"2f88611b-6078-4735-9ae1-8f2408ea7457","Type":"ContainerStarted","Data":"d7421735a9f78794617a7a6ac771c4c521e350778516d1075f8d56e960d8b65e"} Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.606096 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.606157 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.606181 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.606269 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.606326 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.606378 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdpzg\" (UniqueName: \"kubernetes.io/projected/05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6-kube-api-access-fdpzg\") pod \"glance-default-internal-api-0\" (UID: \"05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.606407 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.606431 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6-logs\") pod \"glance-default-internal-api-0\" (UID: \"05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.611823 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-84b5-account-create-update-mbh6v" event={"ID":"87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3","Type":"ContainerStarted","Data":"f0d37780fd77b8107344feadd4cfee144eb66090b8157be38ae7d1e8c95369f4"} Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.620086 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-tgz8m" event={"ID":"4db39912-1ffe-48e4-b392-f993bbf6ee46","Type":"ContainerStarted","Data":"70e7449b051b9fd5bca33dfd9d518a3b53dc19d96800405171c49aa45d2ba541"} Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.637415 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-dd77-account-create-update-8bdgp" event={"ID":"7c9ad1ec-0782-4fb8-a838-d44194d33047","Type":"ContainerStarted","Data":"8cdce3b2ff2cf60d42076a7afc89c1243374e92fd95d28eb7b681e85c14e46bb"} Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.638627 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-5965-account-create-update-wtxml" event={"ID":"b1abbf4d-806b-40fa-9e1f-b415c5f8488e","Type":"ContainerStarted","Data":"2e1140d02376d6e62464a5e8ad68f549de6305ecea3fc929ec07158f895be485"} 
Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.709134 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.709499 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.709542 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdpzg\" (UniqueName: \"kubernetes.io/projected/05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6-kube-api-access-fdpzg\") pod \"glance-default-internal-api-0\" (UID: \"05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.709562 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.709851 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6-logs\") pod \"glance-default-internal-api-0\" (UID: \"05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.709932 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.710188 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.710214 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.716287 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6-logs\") pod \"glance-default-internal-api-0\" (UID: \"05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.716568 4708 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.718688 4708 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/glance-default-internal-api-0" Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.720774 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.740091 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.740837 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.777642 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdpzg\" (UniqueName: \"kubernetes.io/projected/05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6-kube-api-access-fdpzg\") pod \"glance-default-internal-api-0\" (UID: \"05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.784065 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:29:15 crc kubenswrapper[4708]: I0203 07:29:15.831527 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6\") " pod="openstack/glance-default-internal-api-0" Feb 03 07:29:16 crc kubenswrapper[4708]: I0203 07:29:16.070178 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 03 07:29:16 crc kubenswrapper[4708]: I0203 07:29:16.114887 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f4fde2c-f8ce-4722-93ee-6ddc16b128f7" path="/var/lib/kubelet/pods/1f4fde2c-f8ce-4722-93ee-6ddc16b128f7/volumes" Feb 03 07:29:16 crc kubenswrapper[4708]: I0203 07:29:16.116345 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23415315-630b-4b47-91ac-ac60c2af15bc" 
path="/var/lib/kubelet/pods/23415315-630b-4b47-91ac-ac60c2af15bc/volumes" Feb 03 07:29:16 crc kubenswrapper[4708]: I0203 07:29:16.116521 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 03 07:29:16 crc kubenswrapper[4708]: I0203 07:29:16.652040 4708 generic.go:334] "Generic (PLEG): container finished" podID="b1abbf4d-806b-40fa-9e1f-b415c5f8488e" containerID="9d03a5d725cefee32def34f26713ce294442c88c66d86f4742260f4225207cbf" exitCode=0 Feb 03 07:29:16 crc kubenswrapper[4708]: I0203 07:29:16.652318 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-5965-account-create-update-wtxml" event={"ID":"b1abbf4d-806b-40fa-9e1f-b415c5f8488e","Type":"ContainerDied","Data":"9d03a5d725cefee32def34f26713ce294442c88c66d86f4742260f4225207cbf"} Feb 03 07:29:16 crc kubenswrapper[4708]: I0203 07:29:16.658287 4708 generic.go:334] "Generic (PLEG): container finished" podID="87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3" containerID="f6fae03f10f324d13308c06f4b0040caf7f12316006f23477d73fc62be486bb9" exitCode=0 Feb 03 07:29:16 crc kubenswrapper[4708]: I0203 07:29:16.658378 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-84b5-account-create-update-mbh6v" event={"ID":"87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3","Type":"ContainerDied","Data":"f6fae03f10f324d13308c06f4b0040caf7f12316006f23477d73fc62be486bb9"} Feb 03 07:29:16 crc kubenswrapper[4708]: I0203 07:29:16.671102 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-db-sync-v45nm" event={"ID":"74d486e0-cafe-4001-a817-dea3959bb928","Type":"ContainerStarted","Data":"6a98072eeac088ea0b77303d6f69b80b99b024caec7c13b24c26884c103ba984"} Feb 03 07:29:16 crc kubenswrapper[4708]: I0203 07:29:16.680606 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1210a44-81ea-4f31-a263-219a2b36b92e","Type":"ContainerStarted","Data":"d13af06a7369cdd336b14f8dd4b87cf07029834be9499be55cdb6a89d4664991"} Feb 03 07:29:16 crc kubenswrapper[4708]: I0203 07:29:16.680906 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1210a44-81ea-4f31-a263-219a2b36b92e","Type":"ContainerStarted","Data":"f0d587bd6086a22a2103e5f41b0d489589e1fe13a1fee7356636bc60344ed733"} Feb 03 07:29:16 crc kubenswrapper[4708]: I0203 07:29:16.698546 4708 generic.go:334] "Generic (PLEG): container finished" podID="2f88611b-6078-4735-9ae1-8f2408ea7457" containerID="71846cf9a019d3554b1802eef445ea86d5912773f802d4f6b01863d261e10257" exitCode=0 Feb 03 07:29:16 crc kubenswrapper[4708]: I0203 07:29:16.698969 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-cs5gv" event={"ID":"2f88611b-6078-4735-9ae1-8f2408ea7457","Type":"ContainerDied","Data":"71846cf9a019d3554b1802eef445ea86d5912773f802d4f6b01863d261e10257"} Feb 03 07:29:16 crc kubenswrapper[4708]: I0203 07:29:16.702424 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a1cfcb85-5e57-43d2-8255-4be0c18d60f0","Type":"ContainerStarted","Data":"9de04d6ee89c3cfccec3f68554f844a0a66552d6d812c32dd555c88e79f700e5"} Feb 03 07:29:16 crc kubenswrapper[4708]: I0203 07:29:16.706482 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ironic-inspector-db-sync-v45nm" podStartSLOduration=4.058797369 podStartE2EDuration="17.706466969s" podCreationTimestamp="2026-02-03 07:28:59 +0000 UTC" 
firstStartedPulling="2026-02-03 07:29:00.685069927 +0000 UTC m=+1119.667016734" lastFinishedPulling="2026-02-03 07:29:14.332739527 +0000 UTC m=+1133.314686334" observedRunningTime="2026-02-03 07:29:16.701028334 +0000 UTC m=+1135.682975141" watchObservedRunningTime="2026-02-03 07:29:16.706466969 +0000 UTC m=+1135.688413776" Feb 03 07:29:16 crc kubenswrapper[4708]: I0203 07:29:16.713976 4708 generic.go:334] "Generic (PLEG): container finished" podID="4db39912-1ffe-48e4-b392-f993bbf6ee46" containerID="d5c4b67f03d1fe85a8d35d6b12b663f9ca8e7d87391745050db51aafca363b8c" exitCode=0 Feb 03 07:29:16 crc kubenswrapper[4708]: I0203 07:29:16.714056 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-tgz8m" event={"ID":"4db39912-1ffe-48e4-b392-f993bbf6ee46","Type":"ContainerDied","Data":"d5c4b67f03d1fe85a8d35d6b12b663f9ca8e7d87391745050db51aafca363b8c"} Feb 03 07:29:16 crc kubenswrapper[4708]: I0203 07:29:16.739907 4708 generic.go:334] "Generic (PLEG): container finished" podID="7c9ad1ec-0782-4fb8-a838-d44194d33047" containerID="1304aafdabbf3e81599db1cbeae7fc3ce845fd09231bd52d17af31f50ba2b468" exitCode=0 Feb 03 07:29:16 crc kubenswrapper[4708]: I0203 07:29:16.739986 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-dd77-account-create-update-8bdgp" event={"ID":"7c9ad1ec-0782-4fb8-a838-d44194d33047","Type":"ContainerDied","Data":"1304aafdabbf3e81599db1cbeae7fc3ce845fd09231bd52d17af31f50ba2b468"} Feb 03 07:29:16 crc kubenswrapper[4708]: I0203 07:29:16.750466 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 03 07:29:16 crc kubenswrapper[4708]: I0203 07:29:16.756703 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"55a84e3b-6f9a-44d0-b059-2a4c842810dc","Type":"ContainerStarted","Data":"2a433d3994a1926f593d219205e96b2bc9b33d6da1836ea53bc915040049155a"} Feb 03 07:29:16 crc kubenswrapper[4708]: I0203 07:29:16.793180 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=17.793158642 podStartE2EDuration="17.793158642s" podCreationTimestamp="2026-02-03 07:28:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:29:16.791110201 +0000 UTC m=+1135.773057008" watchObservedRunningTime="2026-02-03 07:29:16.793158642 +0000 UTC m=+1135.775105449" Feb 03 07:29:17 crc kubenswrapper[4708]: I0203 07:29:17.245351 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-dkt4g" Feb 03 07:29:17 crc kubenswrapper[4708]: I0203 07:29:17.373618 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9k6b5\" (UniqueName: \"kubernetes.io/projected/6396b5c7-1019-4539-b518-3fa061f6e53a-kube-api-access-9k6b5\") pod \"6396b5c7-1019-4539-b518-3fa061f6e53a\" (UID: \"6396b5c7-1019-4539-b518-3fa061f6e53a\") " Feb 03 07:29:17 crc kubenswrapper[4708]: I0203 07:29:17.373952 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6396b5c7-1019-4539-b518-3fa061f6e53a-operator-scripts\") pod \"6396b5c7-1019-4539-b518-3fa061f6e53a\" (UID: \"6396b5c7-1019-4539-b518-3fa061f6e53a\") " Feb 03 07:29:17 crc kubenswrapper[4708]: I0203 07:29:17.380764 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6396b5c7-1019-4539-b518-3fa061f6e53a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6396b5c7-1019-4539-b518-3fa061f6e53a" (UID: "6396b5c7-1019-4539-b518-3fa061f6e53a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:29:17 crc kubenswrapper[4708]: I0203 07:29:17.381970 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6396b5c7-1019-4539-b518-3fa061f6e53a-kube-api-access-9k6b5" (OuterVolumeSpecName: "kube-api-access-9k6b5") pod "6396b5c7-1019-4539-b518-3fa061f6e53a" (UID: "6396b5c7-1019-4539-b518-3fa061f6e53a"). InnerVolumeSpecName "kube-api-access-9k6b5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:29:17 crc kubenswrapper[4708]: I0203 07:29:17.476351 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9k6b5\" (UniqueName: \"kubernetes.io/projected/6396b5c7-1019-4539-b518-3fa061f6e53a-kube-api-access-9k6b5\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:17 crc kubenswrapper[4708]: I0203 07:29:17.476386 4708 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6396b5c7-1019-4539-b518-3fa061f6e53a-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:17 crc kubenswrapper[4708]: I0203 07:29:17.768952 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1210a44-81ea-4f31-a263-219a2b36b92e","Type":"ContainerStarted","Data":"54bbba2b07befa18370ec31387201631759267c88785ca533ecc209fca7ed35d"} Feb 03 07:29:17 crc kubenswrapper[4708]: I0203 07:29:17.770777 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a1cfcb85-5e57-43d2-8255-4be0c18d60f0","Type":"ContainerStarted","Data":"66319eb292d0f9b7094937d32e4afadf3448019dcbcc2d3ac606b3ae16d8eae8"} Feb 03 07:29:17 crc kubenswrapper[4708]: I0203 07:29:17.772964 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6","Type":"ContainerStarted","Data":"c3ca56113bfad7441945a9c3b662fc97648ea2e4470133fcc57b81de87d69af8"} Feb 03 07:29:17 crc kubenswrapper[4708]: I0203 07:29:17.772984 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6","Type":"ContainerStarted","Data":"eddb17568e82efd86410550863627b0b2b8e972ba051a94c1aaf28e0a8a3084d"} Feb 03 07:29:17 crc kubenswrapper[4708]: I0203 
07:29:17.774304 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-dkt4g" event={"ID":"6396b5c7-1019-4539-b518-3fa061f6e53a","Type":"ContainerDied","Data":"1dc6c88bdef291a5f096c716c599f4863e222463b5d2ecd08e46ba33fac7efe0"} Feb 03 07:29:17 crc kubenswrapper[4708]: I0203 07:29:17.774325 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1dc6c88bdef291a5f096c716c599f4863e222463b5d2ecd08e46ba33fac7efe0" Feb 03 07:29:17 crc kubenswrapper[4708]: I0203 07:29:17.774342 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-dkt4g" Feb 03 07:29:17 crc kubenswrapper[4708]: I0203 07:29:17.775491 4708 generic.go:334] "Generic (PLEG): container finished" podID="74d486e0-cafe-4001-a817-dea3959bb928" containerID="6a98072eeac088ea0b77303d6f69b80b99b024caec7c13b24c26884c103ba984" exitCode=0 Feb 03 07:29:17 crc kubenswrapper[4708]: I0203 07:29:17.776015 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-db-sync-v45nm" event={"ID":"74d486e0-cafe-4001-a817-dea3959bb928","Type":"ContainerDied","Data":"6a98072eeac088ea0b77303d6f69b80b99b024caec7c13b24c26884c103ba984"} Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.156428 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-5965-account-create-update-wtxml" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.309370 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1abbf4d-806b-40fa-9e1f-b415c5f8488e-operator-scripts\") pod \"b1abbf4d-806b-40fa-9e1f-b415c5f8488e\" (UID: \"b1abbf4d-806b-40fa-9e1f-b415c5f8488e\") " Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.309440 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8bfkk\" (UniqueName: \"kubernetes.io/projected/b1abbf4d-806b-40fa-9e1f-b415c5f8488e-kube-api-access-8bfkk\") pod \"b1abbf4d-806b-40fa-9e1f-b415c5f8488e\" (UID: \"b1abbf4d-806b-40fa-9e1f-b415c5f8488e\") " Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.310556 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b1abbf4d-806b-40fa-9e1f-b415c5f8488e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b1abbf4d-806b-40fa-9e1f-b415c5f8488e" (UID: "b1abbf4d-806b-40fa-9e1f-b415c5f8488e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.320026 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1abbf4d-806b-40fa-9e1f-b415c5f8488e-kube-api-access-8bfkk" (OuterVolumeSpecName: "kube-api-access-8bfkk") pod "b1abbf4d-806b-40fa-9e1f-b415c5f8488e" (UID: "b1abbf4d-806b-40fa-9e1f-b415c5f8488e"). InnerVolumeSpecName "kube-api-access-8bfkk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.391043 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-tgz8m" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.394023 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-84b5-account-create-update-mbh6v" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.411437 4708 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1abbf4d-806b-40fa-9e1f-b415c5f8488e-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.411471 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8bfkk\" (UniqueName: \"kubernetes.io/projected/b1abbf4d-806b-40fa-9e1f-b415c5f8488e-kube-api-access-8bfkk\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.519451 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hg7rs\" (UniqueName: \"kubernetes.io/projected/4db39912-1ffe-48e4-b392-f993bbf6ee46-kube-api-access-hg7rs\") pod \"4db39912-1ffe-48e4-b392-f993bbf6ee46\" (UID: \"4db39912-1ffe-48e4-b392-f993bbf6ee46\") " Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.519587 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-px2qg\" (UniqueName: \"kubernetes.io/projected/87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3-kube-api-access-px2qg\") pod \"87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3\" (UID: \"87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3\") " Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.519629 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3-operator-scripts\") pod \"87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3\" (UID: \"87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3\") " Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.519684 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4db39912-1ffe-48e4-b392-f993bbf6ee46-operator-scripts\") pod \"4db39912-1ffe-48e4-b392-f993bbf6ee46\" (UID: \"4db39912-1ffe-48e4-b392-f993bbf6ee46\") " Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.521697 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3" (UID: "87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.521832 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4db39912-1ffe-48e4-b392-f993bbf6ee46-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4db39912-1ffe-48e4-b392-f993bbf6ee46" (UID: "4db39912-1ffe-48e4-b392-f993bbf6ee46"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.529116 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3-kube-api-access-px2qg" (OuterVolumeSpecName: "kube-api-access-px2qg") pod "87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3" (UID: "87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3"). InnerVolumeSpecName "kube-api-access-px2qg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.529446 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4db39912-1ffe-48e4-b392-f993bbf6ee46-kube-api-access-hg7rs" (OuterVolumeSpecName: "kube-api-access-hg7rs") pod "4db39912-1ffe-48e4-b392-f993bbf6ee46" (UID: "4db39912-1ffe-48e4-b392-f993bbf6ee46"). InnerVolumeSpecName "kube-api-access-hg7rs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.572147 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-dd77-account-create-update-8bdgp" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.623541 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-cs5gv" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.623671 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-px2qg\" (UniqueName: \"kubernetes.io/projected/87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3-kube-api-access-px2qg\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.623687 4708 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.623696 4708 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4db39912-1ffe-48e4-b392-f993bbf6ee46-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.623705 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hg7rs\" (UniqueName: \"kubernetes.io/projected/4db39912-1ffe-48e4-b392-f993bbf6ee46-kube-api-access-hg7rs\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.725259 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7c9ad1ec-0782-4fb8-a838-d44194d33047-operator-scripts\") pod \"7c9ad1ec-0782-4fb8-a838-d44194d33047\" (UID: \"7c9ad1ec-0782-4fb8-a838-d44194d33047\") " Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.725352 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f88611b-6078-4735-9ae1-8f2408ea7457-operator-scripts\") pod \"2f88611b-6078-4735-9ae1-8f2408ea7457\" (UID: \"2f88611b-6078-4735-9ae1-8f2408ea7457\") " Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.725537 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4r67m\" (UniqueName: \"kubernetes.io/projected/7c9ad1ec-0782-4fb8-a838-d44194d33047-kube-api-access-4r67m\") pod \"7c9ad1ec-0782-4fb8-a838-d44194d33047\" (UID: \"7c9ad1ec-0782-4fb8-a838-d44194d33047\") " Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.725601 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-txgg2\" (UniqueName: \"kubernetes.io/projected/2f88611b-6078-4735-9ae1-8f2408ea7457-kube-api-access-txgg2\") pod \"2f88611b-6078-4735-9ae1-8f2408ea7457\" (UID: \"2f88611b-6078-4735-9ae1-8f2408ea7457\") " Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.726545 4708 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f88611b-6078-4735-9ae1-8f2408ea7457-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2f88611b-6078-4735-9ae1-8f2408ea7457" (UID: "2f88611b-6078-4735-9ae1-8f2408ea7457"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.726869 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c9ad1ec-0782-4fb8-a838-d44194d33047-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7c9ad1ec-0782-4fb8-a838-d44194d33047" (UID: "7c9ad1ec-0782-4fb8-a838-d44194d33047"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.735369 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f88611b-6078-4735-9ae1-8f2408ea7457-kube-api-access-txgg2" (OuterVolumeSpecName: "kube-api-access-txgg2") pod "2f88611b-6078-4735-9ae1-8f2408ea7457" (UID: "2f88611b-6078-4735-9ae1-8f2408ea7457"). InnerVolumeSpecName "kube-api-access-txgg2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.735447 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c9ad1ec-0782-4fb8-a838-d44194d33047-kube-api-access-4r67m" (OuterVolumeSpecName: "kube-api-access-4r67m") pod "7c9ad1ec-0782-4fb8-a838-d44194d33047" (UID: "7c9ad1ec-0782-4fb8-a838-d44194d33047"). InnerVolumeSpecName "kube-api-access-4r67m". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.795502 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a1cfcb85-5e57-43d2-8255-4be0c18d60f0","Type":"ContainerStarted","Data":"78e2fb5263b4c866f2b025dec74448372ecb1317341ddc099a70ffa2875709d1"} Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.801617 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-tgz8m" event={"ID":"4db39912-1ffe-48e4-b392-f993bbf6ee46","Type":"ContainerDied","Data":"70e7449b051b9fd5bca33dfd9d518a3b53dc19d96800405171c49aa45d2ba541"} Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.801642 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-tgz8m" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.801659 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="70e7449b051b9fd5bca33dfd9d518a3b53dc19d96800405171c49aa45d2ba541" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.803922 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6","Type":"ContainerStarted","Data":"c2bdb1d3354a621c47826cc68f78892c95c729183e79f7dc399a92258b2f24f1"} Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.805176 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-dd77-account-create-update-8bdgp" event={"ID":"7c9ad1ec-0782-4fb8-a838-d44194d33047","Type":"ContainerDied","Data":"8cdce3b2ff2cf60d42076a7afc89c1243374e92fd95d28eb7b681e85c14e46bb"} Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.805203 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8cdce3b2ff2cf60d42076a7afc89c1243374e92fd95d28eb7b681e85c14e46bb" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.805253 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-dd77-account-create-update-8bdgp" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.813438 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-5965-account-create-update-wtxml" event={"ID":"b1abbf4d-806b-40fa-9e1f-b415c5f8488e","Type":"ContainerDied","Data":"2e1140d02376d6e62464a5e8ad68f549de6305ecea3fc929ec07158f895be485"} Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.813497 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2e1140d02376d6e62464a5e8ad68f549de6305ecea3fc929ec07158f895be485" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.813605 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-5965-account-create-update-wtxml" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.818844 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.818818748 podStartE2EDuration="4.818818748s" podCreationTimestamp="2026-02-03 07:29:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:29:18.812897381 +0000 UTC m=+1137.794844218" watchObservedRunningTime="2026-02-03 07:29:18.818818748 +0000 UTC m=+1137.800765545" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.819060 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-84b5-account-create-update-mbh6v" event={"ID":"87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3","Type":"ContainerDied","Data":"f0d37780fd77b8107344feadd4cfee144eb66090b8157be38ae7d1e8c95369f4"} Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.819091 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f0d37780fd77b8107344feadd4cfee144eb66090b8157be38ae7d1e8c95369f4" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.819150 4708 util.go:48] "No ready sandbox for pod can be found. 
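The reconciler_common.go:159 / operation_generator.go:803 / reconciler_common.go:293 triplets above trace each volume through the kubelet volume manager's teardown path: the unmount is started, TearDown succeeds, and the volume is finally recorded as detached. The Go sketch below mirrors only that three-phase ordering; the types and function are hypothetical stand-ins, not kubelet's actual implementation.

package main

import "fmt"

// volumeToUnmount is a hypothetical stand-in for the reconciler's
// actual-state entries; the real code lives in reconciler_common.go
// and operation_generator.go.
type volumeToUnmount struct {
	volumeName string // e.g. "operator-scripts"
	podUID     string // e.g. "2f88611b-6078-4735-9ae1-8f2408ea7457"
}

func unmountAll(vols []volumeToUnmount) {
	for _, v := range vols {
		// Phase 1: a mounted volume has no matching desired-state entry,
		// so the reconciler starts the unmount.
		fmt.Printf("operationExecutor.UnmountVolume started for volume %q pod %q\n", v.volumeName, v.podUID)
		// Phase 2: TearDown removes the mount from the pod's volume dir.
		fmt.Printf("UnmountVolume.TearDown succeeded for volume %q\n", v.volumeName)
		// Phase 3: the actual state of world is updated and the volume is
		// reported detached with an empty DevicePath, as in the log above.
		fmt.Printf("Volume detached for volume %q on node %q DevicePath %q\n", v.volumeName, "crc", "")
	}
}

func main() {
	unmountAll([]volumeToUnmount{
		{"operator-scripts", "2f88611b-6078-4735-9ae1-8f2408ea7457"},
		{"kube-api-access-txgg2", "2f88611b-6078-4735-9ae1-8f2408ea7457"},
	})
}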
Need to start a new one" pod="openstack/nova-api-84b5-account-create-update-mbh6v" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.826097 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-cs5gv" event={"ID":"2f88611b-6078-4735-9ae1-8f2408ea7457","Type":"ContainerDied","Data":"d7421735a9f78794617a7a6ac771c4c521e350778516d1075f8d56e960d8b65e"} Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.826142 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d7421735a9f78794617a7a6ac771c4c521e350778516d1075f8d56e960d8b65e" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.826172 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-cs5gv" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.827497 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4r67m\" (UniqueName: \"kubernetes.io/projected/7c9ad1ec-0782-4fb8-a838-d44194d33047-kube-api-access-4r67m\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.827525 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-txgg2\" (UniqueName: \"kubernetes.io/projected/2f88611b-6078-4735-9ae1-8f2408ea7457-kube-api-access-txgg2\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.827540 4708 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7c9ad1ec-0782-4fb8-a838-d44194d33047-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:18 crc kubenswrapper[4708]: I0203 07:29:18.827551 4708 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f88611b-6078-4735-9ae1-8f2408ea7457-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.157234 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-inspector-db-sync-v45nm" Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.174858 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.174840931 podStartE2EDuration="4.174840931s" podCreationTimestamp="2026-02-03 07:29:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:29:18.833090621 +0000 UTC m=+1137.815037428" watchObservedRunningTime="2026-02-03 07:29:19.174840931 +0000 UTC m=+1138.156787738" Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.241729 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/74d486e0-cafe-4001-a817-dea3959bb928-etc-podinfo\") pod \"74d486e0-cafe-4001-a817-dea3959bb928\" (UID: \"74d486e0-cafe-4001-a817-dea3959bb928\") " Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.241842 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-ironic-inspector-dhcp-hostsdir\" (UniqueName: \"kubernetes.io/empty-dir/74d486e0-cafe-4001-a817-dea3959bb928-var-lib-ironic-inspector-dhcp-hostsdir\") pod \"74d486e0-cafe-4001-a817-dea3959bb928\" (UID: \"74d486e0-cafe-4001-a817-dea3959bb928\") " Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.241922 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2vrp5\" (UniqueName: \"kubernetes.io/projected/74d486e0-cafe-4001-a817-dea3959bb928-kube-api-access-2vrp5\") pod \"74d486e0-cafe-4001-a817-dea3959bb928\" (UID: \"74d486e0-cafe-4001-a817-dea3959bb928\") " Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.241958 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74d486e0-cafe-4001-a817-dea3959bb928-scripts\") pod \"74d486e0-cafe-4001-a817-dea3959bb928\" (UID: \"74d486e0-cafe-4001-a817-dea3959bb928\") " Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.242027 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/74d486e0-cafe-4001-a817-dea3959bb928-config\") pod \"74d486e0-cafe-4001-a817-dea3959bb928\" (UID: \"74d486e0-cafe-4001-a817-dea3959bb928\") " Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.242095 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74d486e0-cafe-4001-a817-dea3959bb928-combined-ca-bundle\") pod \"74d486e0-cafe-4001-a817-dea3959bb928\" (UID: \"74d486e0-cafe-4001-a817-dea3959bb928\") " Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.242133 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-ironic\" (UniqueName: \"kubernetes.io/empty-dir/74d486e0-cafe-4001-a817-dea3959bb928-var-lib-ironic\") pod \"74d486e0-cafe-4001-a817-dea3959bb928\" (UID: \"74d486e0-cafe-4001-a817-dea3959bb928\") " Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.242334 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/74d486e0-cafe-4001-a817-dea3959bb928-var-lib-ironic-inspector-dhcp-hostsdir" (OuterVolumeSpecName: "var-lib-ironic-inspector-dhcp-hostsdir") pod "74d486e0-cafe-4001-a817-dea3959bb928" (UID: "74d486e0-cafe-4001-a817-dea3959bb928"). 
InnerVolumeSpecName "var-lib-ironic-inspector-dhcp-hostsdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.243031 4708 reconciler_common.go:293] "Volume detached for volume \"var-lib-ironic-inspector-dhcp-hostsdir\" (UniqueName: \"kubernetes.io/empty-dir/74d486e0-cafe-4001-a817-dea3959bb928-var-lib-ironic-inspector-dhcp-hostsdir\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.243415 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/74d486e0-cafe-4001-a817-dea3959bb928-var-lib-ironic" (OuterVolumeSpecName: "var-lib-ironic") pod "74d486e0-cafe-4001-a817-dea3959bb928" (UID: "74d486e0-cafe-4001-a817-dea3959bb928"). InnerVolumeSpecName "var-lib-ironic". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.246860 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74d486e0-cafe-4001-a817-dea3959bb928-kube-api-access-2vrp5" (OuterVolumeSpecName: "kube-api-access-2vrp5") pod "74d486e0-cafe-4001-a817-dea3959bb928" (UID: "74d486e0-cafe-4001-a817-dea3959bb928"). InnerVolumeSpecName "kube-api-access-2vrp5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.247083 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/74d486e0-cafe-4001-a817-dea3959bb928-etc-podinfo" (OuterVolumeSpecName: "etc-podinfo") pod "74d486e0-cafe-4001-a817-dea3959bb928" (UID: "74d486e0-cafe-4001-a817-dea3959bb928"). InnerVolumeSpecName "etc-podinfo". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.254585 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74d486e0-cafe-4001-a817-dea3959bb928-scripts" (OuterVolumeSpecName: "scripts") pod "74d486e0-cafe-4001-a817-dea3959bb928" (UID: "74d486e0-cafe-4001-a817-dea3959bb928"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.268453 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74d486e0-cafe-4001-a817-dea3959bb928-config" (OuterVolumeSpecName: "config") pod "74d486e0-cafe-4001-a817-dea3959bb928" (UID: "74d486e0-cafe-4001-a817-dea3959bb928"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.272310 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74d486e0-cafe-4001-a817-dea3959bb928-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "74d486e0-cafe-4001-a817-dea3959bb928" (UID: "74d486e0-cafe-4001-a817-dea3959bb928"). InnerVolumeSpecName "combined-ca-bundle". 
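The pod_startup_latency_tracker.go:104 entries report podStartSLOduration; for these pods the image-pull timestamps are the zero time, and the reported value is consistent with observedRunningTime minus podCreationTimestamp (that pulls are excluded from the SLO figure is my reading, not stated in the log). A quick check of the arithmetic, with the timestamps copied from the glance-default-internal-api-0 entry above:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Values copied from the "Observed pod startup duration" entry for
	// openstack/glance-default-internal-api-0 above.
	created, err := time.Parse(time.RFC3339, "2026-02-03T07:29:15Z")
	if err != nil {
		panic(err)
	}
	observed, err := time.Parse(time.RFC3339Nano, "2026-02-03T07:29:19.174840931Z")
	if err != nil {
		panic(err)
	}
	// Prints 4.174840931, matching podStartSLOduration in the log.
	fmt.Printf("%.9f\n", observed.Sub(created).Seconds())
}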
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.344498 4708 reconciler_common.go:293] "Volume detached for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/74d486e0-cafe-4001-a817-dea3959bb928-etc-podinfo\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.344544 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2vrp5\" (UniqueName: \"kubernetes.io/projected/74d486e0-cafe-4001-a817-dea3959bb928-kube-api-access-2vrp5\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.344558 4708 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74d486e0-cafe-4001-a817-dea3959bb928-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.344570 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/74d486e0-cafe-4001-a817-dea3959bb928-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.344583 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74d486e0-cafe-4001-a817-dea3959bb928-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.344594 4708 reconciler_common.go:293] "Volume detached for volume \"var-lib-ironic\" (UniqueName: \"kubernetes.io/empty-dir/74d486e0-cafe-4001-a817-dea3959bb928-var-lib-ironic\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.667748 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.838346 4708 generic.go:334] "Generic (PLEG): container finished" podID="361821ae-c957-4e31-bb9b-6d659aaceec4" containerID="59cc755623c02c8aa340f9d62fb0fb321384a59e60058aa76f639241ad1ecbe5" exitCode=0 Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.838682 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-conductor-0" event={"ID":"361821ae-c957-4e31-bb9b-6d659aaceec4","Type":"ContainerDied","Data":"59cc755623c02c8aa340f9d62fb0fb321384a59e60058aa76f639241ad1ecbe5"} Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.841217 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-db-sync-v45nm" event={"ID":"74d486e0-cafe-4001-a817-dea3959bb928","Type":"ContainerDied","Data":"cdb0cfb4f06b4c52a38a9d5f8e360b251c39fad2c154fdc584451625bf26afc9"} Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.841809 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cdb0cfb4f06b4c52a38a9d5f8e360b251c39fad2c154fdc584451625bf26afc9" Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.841228 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-inspector-db-sync-v45nm" Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.850738 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1210a44-81ea-4f31-a263-219a2b36b92e","Type":"ContainerStarted","Data":"b102a755355936bffb5efa60355d471e6e85a0bd7b6db9fdfe04a527bd19a8a9"} Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.851451 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e1210a44-81ea-4f31-a263-219a2b36b92e" containerName="ceilometer-central-agent" containerID="cri-o://f0d587bd6086a22a2103e5f41b0d489589e1fe13a1fee7356636bc60344ed733" gracePeriod=30 Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.851560 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e1210a44-81ea-4f31-a263-219a2b36b92e" containerName="proxy-httpd" containerID="cri-o://b102a755355936bffb5efa60355d471e6e85a0bd7b6db9fdfe04a527bd19a8a9" gracePeriod=30 Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.851595 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e1210a44-81ea-4f31-a263-219a2b36b92e" containerName="sg-core" containerID="cri-o://54bbba2b07befa18370ec31387201631759267c88785ca533ecc209fca7ed35d" gracePeriod=30 Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.851624 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e1210a44-81ea-4f31-a263-219a2b36b92e" containerName="ceilometer-notification-agent" containerID="cri-o://d13af06a7369cdd336b14f8dd4b87cf07029834be9499be55cdb6a89d4664991" gracePeriod=30 Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.923432 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Feb 03 07:29:19 crc kubenswrapper[4708]: I0203 07:29:19.945595 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=12.957168745 podStartE2EDuration="17.945578708s" podCreationTimestamp="2026-02-03 07:29:02 +0000 UTC" firstStartedPulling="2026-02-03 07:29:14.525537833 +0000 UTC m=+1133.507484640" lastFinishedPulling="2026-02-03 07:29:19.513947796 +0000 UTC m=+1138.495894603" observedRunningTime="2026-02-03 07:29:19.896064104 +0000 UTC m=+1138.878010921" watchObservedRunningTime="2026-02-03 07:29:19.945578708 +0000 UTC m=+1138.927525515" Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.022241 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ironic-neutron-agent-95b7948fb-x2nkv" Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.121224 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ironic-inspector-0"] Feb 03 07:29:20 crc kubenswrapper[4708]: E0203 07:29:20.121695 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c9ad1ec-0782-4fb8-a838-d44194d33047" containerName="mariadb-account-create-update" Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.121714 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c9ad1ec-0782-4fb8-a838-d44194d33047" containerName="mariadb-account-create-update" Feb 03 07:29:20 crc kubenswrapper[4708]: E0203 07:29:20.121733 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1abbf4d-806b-40fa-9e1f-b415c5f8488e" containerName="mariadb-account-create-update" Feb 03 07:29:20 
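The kuberuntime_container.go:808 lines record each ceilometer-0 container being stopped with gracePeriod=30; the later "container finished" PLEG events show them exiting before that deadline. A minimal sketch of the stop-then-force-kill pattern under those assumptions, with a channel standing in for the runtime's exit notification rather than a real CRI call:

package main

import (
	"fmt"
	"time"
)

// stopWithGrace asks a container to exit, waits up to gracePeriod for the
// runtime to report the exit, and force-kills on timeout. Illustrative
// only; kubelet drives this through the CRI, not a channel.
func stopWithGrace(name string, gracePeriod time.Duration, exited <-chan int) {
	fmt.Printf("Killing container with a grace period containerName=%q gracePeriod=%v\n", name, gracePeriod)
	select {
	case code := <-exited:
		fmt.Printf("container finished containerName=%q exitCode=%d\n", name, code)
	case <-time.After(gracePeriod):
		fmt.Printf("grace period expired; force-killing %q\n", name)
	}
}

func main() {
	exited := make(chan int, 1)
	exited <- 0 // ceilometer-central-agent exits 0 in the log above
	stopWithGrace("ceilometer-central-agent", 30*time.Second, exited)
}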
Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.121740 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1abbf4d-806b-40fa-9e1f-b415c5f8488e" containerName="mariadb-account-create-update"
Feb 03 07:29:20 crc kubenswrapper[4708]: E0203 07:29:20.121752 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6396b5c7-1019-4539-b518-3fa061f6e53a" containerName="mariadb-database-create"
Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.121758 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="6396b5c7-1019-4539-b518-3fa061f6e53a" containerName="mariadb-database-create"
Feb 03 07:29:20 crc kubenswrapper[4708]: E0203 07:29:20.121782 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4db39912-1ffe-48e4-b392-f993bbf6ee46" containerName="mariadb-database-create"
Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.121789 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="4db39912-1ffe-48e4-b392-f993bbf6ee46" containerName="mariadb-database-create"
Feb 03 07:29:20 crc kubenswrapper[4708]: E0203 07:29:20.121816 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74d486e0-cafe-4001-a817-dea3959bb928" containerName="ironic-inspector-db-sync"
Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.121824 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="74d486e0-cafe-4001-a817-dea3959bb928" containerName="ironic-inspector-db-sync"
Feb 03 07:29:20 crc kubenswrapper[4708]: E0203 07:29:20.121841 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3" containerName="mariadb-account-create-update"
Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.121848 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3" containerName="mariadb-account-create-update"
Feb 03 07:29:20 crc kubenswrapper[4708]: E0203 07:29:20.121861 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f88611b-6078-4735-9ae1-8f2408ea7457" containerName="mariadb-database-create"
Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.121868 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f88611b-6078-4735-9ae1-8f2408ea7457" containerName="mariadb-database-create"
Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.122071 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c9ad1ec-0782-4fb8-a838-d44194d33047" containerName="mariadb-account-create-update"
Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.122089 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1abbf4d-806b-40fa-9e1f-b415c5f8488e" containerName="mariadb-account-create-update"
Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.122103 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="74d486e0-cafe-4001-a817-dea3959bb928" containerName="ironic-inspector-db-sync"
Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.122116 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="6396b5c7-1019-4539-b518-3fa061f6e53a" containerName="mariadb-database-create"
Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.122128 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="4db39912-1ffe-48e4-b392-f993bbf6ee46" containerName="mariadb-database-create"
Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.122144 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f88611b-6078-4735-9ae1-8f2408ea7457" containerName="mariadb-database-create"
Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.122158 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3" containerName="mariadb-account-create-update"
Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.126977 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-inspector-0"
Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.130004 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-inspector-config-data"
Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.138125 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-inspector-scripts"
Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.144426 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-inspector-0"]
Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.274920 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zsxcz\" (UniqueName: \"kubernetes.io/projected/b15f6fec-b538-480e-b5a6-644b39d68a59-kube-api-access-zsxcz\") pod \"ironic-inspector-0\" (UID: \"b15f6fec-b538-480e-b5a6-644b39d68a59\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.274999 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b15f6fec-b538-480e-b5a6-644b39d68a59-config\") pod \"ironic-inspector-0\" (UID: \"b15f6fec-b538-480e-b5a6-644b39d68a59\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.275056 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/b15f6fec-b538-480e-b5a6-644b39d68a59-etc-podinfo\") pod \"ironic-inspector-0\" (UID: \"b15f6fec-b538-480e-b5a6-644b39d68a59\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.275137 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-ironic-inspector-dhcp-hostsdir\" (UniqueName: \"kubernetes.io/empty-dir/b15f6fec-b538-480e-b5a6-644b39d68a59-var-lib-ironic-inspector-dhcp-hostsdir\") pod \"ironic-inspector-0\" (UID: \"b15f6fec-b538-480e-b5a6-644b39d68a59\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.275161 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b15f6fec-b538-480e-b5a6-644b39d68a59-combined-ca-bundle\") pod \"ironic-inspector-0\" (UID: \"b15f6fec-b538-480e-b5a6-644b39d68a59\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.275179 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b15f6fec-b538-480e-b5a6-644b39d68a59-scripts\") pod \"ironic-inspector-0\" (UID: \"b15f6fec-b538-480e-b5a6-644b39d68a59\") " pod="openstack/ironic-inspector-0"
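The paired cpu_manager.go:410 / state_mem.go:107 and memory_manager.go:354 lines above remove per-container resource-manager state for pods that no longer exist, such as the completed mariadb-database-create jobs. A toy version of that cleanup, assuming state is nothing more than a map keyed by podUID and container name (kubelet's real managers keep much richer state):

package main

import "fmt"

// key identifies one container's resource assignment, mirroring the
// podUID/containerName pairs in the RemoveStaleState log lines.
type key struct{ podUID, containerName string }

// removeStaleState deletes assignments whose pod is no longer active.
func removeStaleState(assignments map[key]string, active map[string]bool) {
	for k := range assignments { // deleting during range is safe in Go
		if !active[k.podUID] {
			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n", k.podUID, k.containerName)
			delete(assignments, k)
		}
	}
}

func main() {
	assignments := map[key]string{
		{"2f88611b-6078-4735-9ae1-8f2408ea7457", "mariadb-database-create"}: "cpuset 0-1",
	}
	removeStaleState(assignments, map[string]bool{}) // no pods still active
	fmt.Println("remaining assignments:", len(assignments))
}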
\"b15f6fec-b538-480e-b5a6-644b39d68a59\") " pod="openstack/ironic-inspector-0" Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.376538 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-ironic\" (UniqueName: \"kubernetes.io/empty-dir/b15f6fec-b538-480e-b5a6-644b39d68a59-var-lib-ironic\") pod \"ironic-inspector-0\" (UID: \"b15f6fec-b538-480e-b5a6-644b39d68a59\") " pod="openstack/ironic-inspector-0" Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.376973 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zsxcz\" (UniqueName: \"kubernetes.io/projected/b15f6fec-b538-480e-b5a6-644b39d68a59-kube-api-access-zsxcz\") pod \"ironic-inspector-0\" (UID: \"b15f6fec-b538-480e-b5a6-644b39d68a59\") " pod="openstack/ironic-inspector-0" Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.377062 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-ironic\" (UniqueName: \"kubernetes.io/empty-dir/b15f6fec-b538-480e-b5a6-644b39d68a59-var-lib-ironic\") pod \"ironic-inspector-0\" (UID: \"b15f6fec-b538-480e-b5a6-644b39d68a59\") " pod="openstack/ironic-inspector-0" Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.377030 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b15f6fec-b538-480e-b5a6-644b39d68a59-config\") pod \"ironic-inspector-0\" (UID: \"b15f6fec-b538-480e-b5a6-644b39d68a59\") " pod="openstack/ironic-inspector-0" Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.377776 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/b15f6fec-b538-480e-b5a6-644b39d68a59-etc-podinfo\") pod \"ironic-inspector-0\" (UID: \"b15f6fec-b538-480e-b5a6-644b39d68a59\") " pod="openstack/ironic-inspector-0" Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.377847 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-ironic-inspector-dhcp-hostsdir\" (UniqueName: \"kubernetes.io/empty-dir/b15f6fec-b538-480e-b5a6-644b39d68a59-var-lib-ironic-inspector-dhcp-hostsdir\") pod \"ironic-inspector-0\" (UID: \"b15f6fec-b538-480e-b5a6-644b39d68a59\") " pod="openstack/ironic-inspector-0" Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.377865 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b15f6fec-b538-480e-b5a6-644b39d68a59-combined-ca-bundle\") pod \"ironic-inspector-0\" (UID: \"b15f6fec-b538-480e-b5a6-644b39d68a59\") " pod="openstack/ironic-inspector-0" Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.377899 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b15f6fec-b538-480e-b5a6-644b39d68a59-scripts\") pod \"ironic-inspector-0\" (UID: \"b15f6fec-b538-480e-b5a6-644b39d68a59\") " pod="openstack/ironic-inspector-0" Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.378253 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-ironic-inspector-dhcp-hostsdir\" (UniqueName: \"kubernetes.io/empty-dir/b15f6fec-b538-480e-b5a6-644b39d68a59-var-lib-ironic-inspector-dhcp-hostsdir\") pod \"ironic-inspector-0\" (UID: \"b15f6fec-b538-480e-b5a6-644b39d68a59\") " pod="openstack/ironic-inspector-0" Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.382685 4708 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/b15f6fec-b538-480e-b5a6-644b39d68a59-config\") pod \"ironic-inspector-0\" (UID: \"b15f6fec-b538-480e-b5a6-644b39d68a59\") " pod="openstack/ironic-inspector-0" Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.383058 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b15f6fec-b538-480e-b5a6-644b39d68a59-scripts\") pod \"ironic-inspector-0\" (UID: \"b15f6fec-b538-480e-b5a6-644b39d68a59\") " pod="openstack/ironic-inspector-0" Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.383199 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/b15f6fec-b538-480e-b5a6-644b39d68a59-etc-podinfo\") pod \"ironic-inspector-0\" (UID: \"b15f6fec-b538-480e-b5a6-644b39d68a59\") " pod="openstack/ironic-inspector-0" Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.385199 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b15f6fec-b538-480e-b5a6-644b39d68a59-combined-ca-bundle\") pod \"ironic-inspector-0\" (UID: \"b15f6fec-b538-480e-b5a6-644b39d68a59\") " pod="openstack/ironic-inspector-0" Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.396982 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zsxcz\" (UniqueName: \"kubernetes.io/projected/b15f6fec-b538-480e-b5a6-644b39d68a59-kube-api-access-zsxcz\") pod \"ironic-inspector-0\" (UID: \"b15f6fec-b538-480e-b5a6-644b39d68a59\") " pod="openstack/ironic-inspector-0" Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.456438 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-inspector-0" Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.866535 4708 generic.go:334] "Generic (PLEG): container finished" podID="e1210a44-81ea-4f31-a263-219a2b36b92e" containerID="b102a755355936bffb5efa60355d471e6e85a0bd7b6db9fdfe04a527bd19a8a9" exitCode=0 Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.866993 4708 generic.go:334] "Generic (PLEG): container finished" podID="e1210a44-81ea-4f31-a263-219a2b36b92e" containerID="54bbba2b07befa18370ec31387201631759267c88785ca533ecc209fca7ed35d" exitCode=2 Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.867019 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1210a44-81ea-4f31-a263-219a2b36b92e","Type":"ContainerDied","Data":"b102a755355936bffb5efa60355d471e6e85a0bd7b6db9fdfe04a527bd19a8a9"} Feb 03 07:29:20 crc kubenswrapper[4708]: I0203 07:29:20.867049 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1210a44-81ea-4f31-a263-219a2b36b92e","Type":"ContainerDied","Data":"54bbba2b07befa18370ec31387201631759267c88785ca533ecc209fca7ed35d"} Feb 03 07:29:21 crc kubenswrapper[4708]: I0203 07:29:21.129279 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-inspector-0"] Feb 03 07:29:21 crc kubenswrapper[4708]: W0203 07:29:21.136578 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb15f6fec_b538_480e_b5a6_644b39d68a59.slice/crio-610dd6bc90b623b9f7dbd0602c925da92768918cfd12ac1a125e1538ce859837 WatchSource:0}: Error finding container 610dd6bc90b623b9f7dbd0602c925da92768918cfd12ac1a125e1538ce859837: Status 404 returned error can't find the container with id 610dd6bc90b623b9f7dbd0602c925da92768918cfd12ac1a125e1538ce859837 Feb 03 07:29:21 crc kubenswrapper[4708]: I0203 07:29:21.883191 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-0" event={"ID":"b15f6fec-b538-480e-b5a6-644b39d68a59","Type":"ContainerStarted","Data":"4740e3835897f232c1f89b26407c3719d1d28cbc7f3e08f62f572194ddf28b4c"} Feb 03 07:29:21 crc kubenswrapper[4708]: I0203 07:29:21.883532 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-0" event={"ID":"b15f6fec-b538-480e-b5a6-644b39d68a59","Type":"ContainerStarted","Data":"610dd6bc90b623b9f7dbd0602c925da92768918cfd12ac1a125e1538ce859837"} Feb 03 07:29:22 crc kubenswrapper[4708]: I0203 07:29:22.172408 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-bqd2z"] Feb 03 07:29:22 crc kubenswrapper[4708]: I0203 07:29:22.173703 4708 util.go:30] "No sandbox for pod can be found. 
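On the mount side the ordering runs the other way: reconciler_common.go:245 first verifies the volume is attached, reconciler_common.go:218 starts the mount, and operation_generator.go:637 reports MountVolume.SetUp success, after which the sandbox can be created. A mirror-image sketch of that sequence, again with hypothetical types rather than kubelet's real ones:

package main

import "fmt"

type volumeToMount struct {
	volumeName, podName, podUID string
}

// mountAll mirrors the verify -> mount-started -> SetUp-succeeded ordering
// seen in the ironic-inspector-0 lines above. Illustrative only.
func mountAll(vols []volumeToMount) {
	for _, v := range vols {
		fmt.Printf("operationExecutor.VerifyControllerAttachedVolume started for volume %q pod %q\n", v.volumeName, v.podName)
	}
	for _, v := range vols {
		fmt.Printf("operationExecutor.MountVolume started for volume %q pod %q\n", v.volumeName, v.podName)
		fmt.Printf("MountVolume.SetUp succeeded for volume %q pod %q\n", v.volumeName, v.podName)
	}
	fmt.Println("all volumes mounted; the pod sandbox can now be started")
}

func main() {
	mountAll([]volumeToMount{
		{"config", "ironic-inspector-0", "b15f6fec-b538-480e-b5a6-644b39d68a59"},
		{"scripts", "ironic-inspector-0", "b15f6fec-b538-480e-b5a6-644b39d68a59"},
	})
}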
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-bqd2z" Feb 03 07:29:22 crc kubenswrapper[4708]: I0203 07:29:22.177535 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Feb 03 07:29:22 crc kubenswrapper[4708]: I0203 07:29:22.177846 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-cz69x" Feb 03 07:29:22 crc kubenswrapper[4708]: I0203 07:29:22.179279 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Feb 03 07:29:22 crc kubenswrapper[4708]: I0203 07:29:22.182467 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-bqd2z"] Feb 03 07:29:22 crc kubenswrapper[4708]: I0203 07:29:22.341720 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wf72l\" (UniqueName: \"kubernetes.io/projected/5def47d5-3c2f-4cfb-acc1-63b2c12e5e98-kube-api-access-wf72l\") pod \"nova-cell0-conductor-db-sync-bqd2z\" (UID: \"5def47d5-3c2f-4cfb-acc1-63b2c12e5e98\") " pod="openstack/nova-cell0-conductor-db-sync-bqd2z" Feb 03 07:29:22 crc kubenswrapper[4708]: I0203 07:29:22.341869 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5def47d5-3c2f-4cfb-acc1-63b2c12e5e98-scripts\") pod \"nova-cell0-conductor-db-sync-bqd2z\" (UID: \"5def47d5-3c2f-4cfb-acc1-63b2c12e5e98\") " pod="openstack/nova-cell0-conductor-db-sync-bqd2z" Feb 03 07:29:22 crc kubenswrapper[4708]: I0203 07:29:22.341914 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5def47d5-3c2f-4cfb-acc1-63b2c12e5e98-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-bqd2z\" (UID: \"5def47d5-3c2f-4cfb-acc1-63b2c12e5e98\") " pod="openstack/nova-cell0-conductor-db-sync-bqd2z" Feb 03 07:29:22 crc kubenswrapper[4708]: I0203 07:29:22.341975 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5def47d5-3c2f-4cfb-acc1-63b2c12e5e98-config-data\") pod \"nova-cell0-conductor-db-sync-bqd2z\" (UID: \"5def47d5-3c2f-4cfb-acc1-63b2c12e5e98\") " pod="openstack/nova-cell0-conductor-db-sync-bqd2z" Feb 03 07:29:22 crc kubenswrapper[4708]: I0203 07:29:22.443499 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wf72l\" (UniqueName: \"kubernetes.io/projected/5def47d5-3c2f-4cfb-acc1-63b2c12e5e98-kube-api-access-wf72l\") pod \"nova-cell0-conductor-db-sync-bqd2z\" (UID: \"5def47d5-3c2f-4cfb-acc1-63b2c12e5e98\") " pod="openstack/nova-cell0-conductor-db-sync-bqd2z" Feb 03 07:29:22 crc kubenswrapper[4708]: I0203 07:29:22.443604 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5def47d5-3c2f-4cfb-acc1-63b2c12e5e98-scripts\") pod \"nova-cell0-conductor-db-sync-bqd2z\" (UID: \"5def47d5-3c2f-4cfb-acc1-63b2c12e5e98\") " pod="openstack/nova-cell0-conductor-db-sync-bqd2z" Feb 03 07:29:22 crc kubenswrapper[4708]: I0203 07:29:22.443632 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5def47d5-3c2f-4cfb-acc1-63b2c12e5e98-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-bqd2z\" (UID: 
\"5def47d5-3c2f-4cfb-acc1-63b2c12e5e98\") " pod="openstack/nova-cell0-conductor-db-sync-bqd2z" Feb 03 07:29:22 crc kubenswrapper[4708]: I0203 07:29:22.443671 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5def47d5-3c2f-4cfb-acc1-63b2c12e5e98-config-data\") pod \"nova-cell0-conductor-db-sync-bqd2z\" (UID: \"5def47d5-3c2f-4cfb-acc1-63b2c12e5e98\") " pod="openstack/nova-cell0-conductor-db-sync-bqd2z" Feb 03 07:29:22 crc kubenswrapper[4708]: I0203 07:29:22.450685 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5def47d5-3c2f-4cfb-acc1-63b2c12e5e98-config-data\") pod \"nova-cell0-conductor-db-sync-bqd2z\" (UID: \"5def47d5-3c2f-4cfb-acc1-63b2c12e5e98\") " pod="openstack/nova-cell0-conductor-db-sync-bqd2z" Feb 03 07:29:22 crc kubenswrapper[4708]: I0203 07:29:22.452102 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5def47d5-3c2f-4cfb-acc1-63b2c12e5e98-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-bqd2z\" (UID: \"5def47d5-3c2f-4cfb-acc1-63b2c12e5e98\") " pod="openstack/nova-cell0-conductor-db-sync-bqd2z" Feb 03 07:29:22 crc kubenswrapper[4708]: I0203 07:29:22.463436 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5def47d5-3c2f-4cfb-acc1-63b2c12e5e98-scripts\") pod \"nova-cell0-conductor-db-sync-bqd2z\" (UID: \"5def47d5-3c2f-4cfb-acc1-63b2c12e5e98\") " pod="openstack/nova-cell0-conductor-db-sync-bqd2z" Feb 03 07:29:22 crc kubenswrapper[4708]: I0203 07:29:22.467621 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wf72l\" (UniqueName: \"kubernetes.io/projected/5def47d5-3c2f-4cfb-acc1-63b2c12e5e98-kube-api-access-wf72l\") pod \"nova-cell0-conductor-db-sync-bqd2z\" (UID: \"5def47d5-3c2f-4cfb-acc1-63b2c12e5e98\") " pod="openstack/nova-cell0-conductor-db-sync-bqd2z" Feb 03 07:29:22 crc kubenswrapper[4708]: I0203 07:29:22.495639 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-bqd2z" Feb 03 07:29:22 crc kubenswrapper[4708]: I0203 07:29:22.931290 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-bqd2z"] Feb 03 07:29:23 crc kubenswrapper[4708]: I0203 07:29:23.000867 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ironic-inspector-0"] Feb 03 07:29:23 crc kubenswrapper[4708]: I0203 07:29:23.902473 4708 generic.go:334] "Generic (PLEG): container finished" podID="e1210a44-81ea-4f31-a263-219a2b36b92e" containerID="d13af06a7369cdd336b14f8dd4b87cf07029834be9499be55cdb6a89d4664991" exitCode=0 Feb 03 07:29:23 crc kubenswrapper[4708]: I0203 07:29:23.902773 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1210a44-81ea-4f31-a263-219a2b36b92e","Type":"ContainerDied","Data":"d13af06a7369cdd336b14f8dd4b87cf07029834be9499be55cdb6a89d4664991"} Feb 03 07:29:23 crc kubenswrapper[4708]: I0203 07:29:23.904889 4708 generic.go:334] "Generic (PLEG): container finished" podID="b15f6fec-b538-480e-b5a6-644b39d68a59" containerID="4740e3835897f232c1f89b26407c3719d1d28cbc7f3e08f62f572194ddf28b4c" exitCode=0 Feb 03 07:29:23 crc kubenswrapper[4708]: I0203 07:29:23.904938 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-0" event={"ID":"b15f6fec-b538-480e-b5a6-644b39d68a59","Type":"ContainerDied","Data":"4740e3835897f232c1f89b26407c3719d1d28cbc7f3e08f62f572194ddf28b4c"} Feb 03 07:29:23 crc kubenswrapper[4708]: I0203 07:29:23.907062 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-bqd2z" event={"ID":"5def47d5-3c2f-4cfb-acc1-63b2c12e5e98","Type":"ContainerStarted","Data":"c0d4da568eb16436287223207ef7bae90ba57c965f2880a908bd6f86a4a11331"} Feb 03 07:29:24 crc kubenswrapper[4708]: I0203 07:29:24.258807 4708 util.go:48] "No ready sandbox for pod can be found. 
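Every entry in this capture follows a fixed shape: a journald prefix (date, host, unit with pid), then a klog header (severity letter, date, time, pid, file:line), then the message. When sifting a log like this one, a small parser is handy; the regular expression below is my own approximation of that shape, not an official grammar:

package main

import (
	"fmt"
	"regexp"
)

// klogLine approximates the entries above, e.g.:
//   Feb 03 07:29:23 crc kubenswrapper[4708]: I0203 07:29:23.000867 4708 kubelet.go:2437] "SyncLoop DELETE" ...
var klogLine = regexp.MustCompile(
	`^(\w{3} \d{2} \d{2}:\d{2}:\d{2}) (\S+) kubenswrapper\[(\d+)\]: ([IWE])(\d{4} \d{2}:\d{2}:\d{2}\.\d+)\s+\d+ ([\w.]+:\d+)\] (.*)$`)

func main() {
	line := `Feb 03 07:29:23 crc kubenswrapper[4708]: I0203 07:29:23.000867 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ironic-inspector-0"]`
	m := klogLine.FindStringSubmatch(line)
	if m == nil {
		fmt.Println("no match")
		return
	}
	fmt.Println("host:    ", m[2])
	fmt.Println("severity:", m[4]) // I=info, W=warning, E=error
	fmt.Println("source:  ", m[6]) // file:line inside kubelet
	fmt.Println("message: ", m[7])
}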
Feb 03 07:29:24 crc kubenswrapper[4708]: I0203 07:29:24.258807 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-inspector-0"
Feb 03 07:29:24 crc kubenswrapper[4708]: I0203 07:29:24.378907 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zsxcz\" (UniqueName: \"kubernetes.io/projected/b15f6fec-b538-480e-b5a6-644b39d68a59-kube-api-access-zsxcz\") pod \"b15f6fec-b538-480e-b5a6-644b39d68a59\" (UID: \"b15f6fec-b538-480e-b5a6-644b39d68a59\") "
Feb 03 07:29:24 crc kubenswrapper[4708]: I0203 07:29:24.378959 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-ironic-inspector-dhcp-hostsdir\" (UniqueName: \"kubernetes.io/empty-dir/b15f6fec-b538-480e-b5a6-644b39d68a59-var-lib-ironic-inspector-dhcp-hostsdir\") pod \"b15f6fec-b538-480e-b5a6-644b39d68a59\" (UID: \"b15f6fec-b538-480e-b5a6-644b39d68a59\") "
Feb 03 07:29:24 crc kubenswrapper[4708]: I0203 07:29:24.378985 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/b15f6fec-b538-480e-b5a6-644b39d68a59-etc-podinfo\") pod \"b15f6fec-b538-480e-b5a6-644b39d68a59\" (UID: \"b15f6fec-b538-480e-b5a6-644b39d68a59\") "
Feb 03 07:29:24 crc kubenswrapper[4708]: I0203 07:29:24.379025 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b15f6fec-b538-480e-b5a6-644b39d68a59-scripts\") pod \"b15f6fec-b538-480e-b5a6-644b39d68a59\" (UID: \"b15f6fec-b538-480e-b5a6-644b39d68a59\") "
Feb 03 07:29:24 crc kubenswrapper[4708]: I0203 07:29:24.379148 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-ironic\" (UniqueName: \"kubernetes.io/empty-dir/b15f6fec-b538-480e-b5a6-644b39d68a59-var-lib-ironic\") pod \"b15f6fec-b538-480e-b5a6-644b39d68a59\" (UID: \"b15f6fec-b538-480e-b5a6-644b39d68a59\") "
Feb 03 07:29:24 crc kubenswrapper[4708]: I0203 07:29:24.379192 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b15f6fec-b538-480e-b5a6-644b39d68a59-combined-ca-bundle\") pod \"b15f6fec-b538-480e-b5a6-644b39d68a59\" (UID: \"b15f6fec-b538-480e-b5a6-644b39d68a59\") "
Feb 03 07:29:24 crc kubenswrapper[4708]: I0203 07:29:24.379340 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b15f6fec-b538-480e-b5a6-644b39d68a59-config\") pod \"b15f6fec-b538-480e-b5a6-644b39d68a59\" (UID: \"b15f6fec-b538-480e-b5a6-644b39d68a59\") "
Feb 03 07:29:24 crc kubenswrapper[4708]: I0203 07:29:24.380082 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b15f6fec-b538-480e-b5a6-644b39d68a59-var-lib-ironic-inspector-dhcp-hostsdir" (OuterVolumeSpecName: "var-lib-ironic-inspector-dhcp-hostsdir") pod "b15f6fec-b538-480e-b5a6-644b39d68a59" (UID: "b15f6fec-b538-480e-b5a6-644b39d68a59"). InnerVolumeSpecName "var-lib-ironic-inspector-dhcp-hostsdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 07:29:24 crc kubenswrapper[4708]: I0203 07:29:24.380743 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b15f6fec-b538-480e-b5a6-644b39d68a59-var-lib-ironic" (OuterVolumeSpecName: "var-lib-ironic") pod "b15f6fec-b538-480e-b5a6-644b39d68a59" (UID: "b15f6fec-b538-480e-b5a6-644b39d68a59"). InnerVolumeSpecName "var-lib-ironic". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 07:29:24 crc kubenswrapper[4708]: I0203 07:29:24.385921 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b15f6fec-b538-480e-b5a6-644b39d68a59-config" (OuterVolumeSpecName: "config") pod "b15f6fec-b538-480e-b5a6-644b39d68a59" (UID: "b15f6fec-b538-480e-b5a6-644b39d68a59"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:29:24 crc kubenswrapper[4708]: I0203 07:29:24.385971 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b15f6fec-b538-480e-b5a6-644b39d68a59-scripts" (OuterVolumeSpecName: "scripts") pod "b15f6fec-b538-480e-b5a6-644b39d68a59" (UID: "b15f6fec-b538-480e-b5a6-644b39d68a59"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:29:24 crc kubenswrapper[4708]: I0203 07:29:24.386014 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/b15f6fec-b538-480e-b5a6-644b39d68a59-etc-podinfo" (OuterVolumeSpecName: "etc-podinfo") pod "b15f6fec-b538-480e-b5a6-644b39d68a59" (UID: "b15f6fec-b538-480e-b5a6-644b39d68a59"). InnerVolumeSpecName "etc-podinfo". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Feb 03 07:29:24 crc kubenswrapper[4708]: I0203 07:29:24.397426 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b15f6fec-b538-480e-b5a6-644b39d68a59-kube-api-access-zsxcz" (OuterVolumeSpecName: "kube-api-access-zsxcz") pod "b15f6fec-b538-480e-b5a6-644b39d68a59" (UID: "b15f6fec-b538-480e-b5a6-644b39d68a59"). InnerVolumeSpecName "kube-api-access-zsxcz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 07:29:24 crc kubenswrapper[4708]: I0203 07:29:24.419010 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b15f6fec-b538-480e-b5a6-644b39d68a59-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b15f6fec-b538-480e-b5a6-644b39d68a59" (UID: "b15f6fec-b538-480e-b5a6-644b39d68a59"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:29:24 crc kubenswrapper[4708]: I0203 07:29:24.481216 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/b15f6fec-b538-480e-b5a6-644b39d68a59-config\") on node \"crc\" DevicePath \"\""
Feb 03 07:29:24 crc kubenswrapper[4708]: I0203 07:29:24.481243 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zsxcz\" (UniqueName: \"kubernetes.io/projected/b15f6fec-b538-480e-b5a6-644b39d68a59-kube-api-access-zsxcz\") on node \"crc\" DevicePath \"\""
Feb 03 07:29:24 crc kubenswrapper[4708]: I0203 07:29:24.481256 4708 reconciler_common.go:293] "Volume detached for volume \"var-lib-ironic-inspector-dhcp-hostsdir\" (UniqueName: \"kubernetes.io/empty-dir/b15f6fec-b538-480e-b5a6-644b39d68a59-var-lib-ironic-inspector-dhcp-hostsdir\") on node \"crc\" DevicePath \"\""
Feb 03 07:29:24 crc kubenswrapper[4708]: I0203 07:29:24.481267 4708 reconciler_common.go:293] "Volume detached for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/b15f6fec-b538-480e-b5a6-644b39d68a59-etc-podinfo\") on node \"crc\" DevicePath \"\""
Feb 03 07:29:24 crc kubenswrapper[4708]: I0203 07:29:24.481276 4708 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b15f6fec-b538-480e-b5a6-644b39d68a59-scripts\") on node \"crc\" DevicePath \"\""
Feb 03 07:29:24 crc kubenswrapper[4708]: I0203 07:29:24.481284 4708 reconciler_common.go:293] "Volume detached for volume \"var-lib-ironic\" (UniqueName: \"kubernetes.io/empty-dir/b15f6fec-b538-480e-b5a6-644b39d68a59-var-lib-ironic\") on node \"crc\" DevicePath \"\""
Feb 03 07:29:24 crc kubenswrapper[4708]: I0203 07:29:24.481294 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b15f6fec-b538-480e-b5a6-644b39d68a59-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 03 07:29:24 crc kubenswrapper[4708]: I0203 07:29:24.982624 4708 generic.go:334] "Generic (PLEG): container finished" podID="e1210a44-81ea-4f31-a263-219a2b36b92e" containerID="f0d587bd6086a22a2103e5f41b0d489589e1fe13a1fee7356636bc60344ed733" exitCode=0
Feb 03 07:29:24 crc kubenswrapper[4708]: I0203 07:29:24.983018 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1210a44-81ea-4f31-a263-219a2b36b92e","Type":"ContainerDied","Data":"f0d587bd6086a22a2103e5f41b0d489589e1fe13a1fee7356636bc60344ed733"}
Feb 03 07:29:24 crc kubenswrapper[4708]: I0203 07:29:24.984833 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-0" event={"ID":"b15f6fec-b538-480e-b5a6-644b39d68a59","Type":"ContainerDied","Data":"610dd6bc90b623b9f7dbd0602c925da92768918cfd12ac1a125e1538ce859837"}
Feb 03 07:29:24 crc kubenswrapper[4708]: I0203 07:29:24.984876 4708 scope.go:117] "RemoveContainer" containerID="4740e3835897f232c1f89b26407c3719d1d28cbc7f3e08f62f572194ddf28b4c"
Feb 03 07:29:24 crc kubenswrapper[4708]: I0203 07:29:24.985029 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-inspector-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.062345 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ironic-inspector-0"]
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.083415 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ironic-inspector-0"]
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.092820 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ironic-inspector-0"]
Feb 03 07:29:25 crc kubenswrapper[4708]: E0203 07:29:25.093210 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b15f6fec-b538-480e-b5a6-644b39d68a59" containerName="ironic-python-agent-init"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.093223 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="b15f6fec-b538-480e-b5a6-644b39d68a59" containerName="ironic-python-agent-init"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.093413 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="b15f6fec-b538-480e-b5a6-644b39d68a59" containerName="ironic-python-agent-init"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.096550 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ironic-inspector-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.100081 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-inspector-scripts"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.102532 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ironic-inspector-public-svc"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.102813 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ironic-inspector-config-data"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.102971 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ironic-inspector-internal-svc"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.119117 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-inspector-0"]
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.195824 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2365ac26-e49a-4ab3-8781-20c1b697b51d-combined-ca-bundle\") pod \"ironic-inspector-0\" (UID: \"2365ac26-e49a-4ab3-8781-20c1b697b51d\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.195887 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-ironic-inspector-dhcp-hostsdir\" (UniqueName: \"kubernetes.io/empty-dir/2365ac26-e49a-4ab3-8781-20c1b697b51d-var-lib-ironic-inspector-dhcp-hostsdir\") pod \"ironic-inspector-0\" (UID: \"2365ac26-e49a-4ab3-8781-20c1b697b51d\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.195918 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-ironic\" (UniqueName: \"kubernetes.io/empty-dir/2365ac26-e49a-4ab3-8781-20c1b697b51d-var-lib-ironic\") pod \"ironic-inspector-0\" (UID: \"2365ac26-e49a-4ab3-8781-20c1b697b51d\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.195940 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2365ac26-e49a-4ab3-8781-20c1b697b51d-scripts\") pod \"ironic-inspector-0\" (UID: \"2365ac26-e49a-4ab3-8781-20c1b697b51d\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.195988 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2365ac26-e49a-4ab3-8781-20c1b697b51d-config\") pod \"ironic-inspector-0\" (UID: \"2365ac26-e49a-4ab3-8781-20c1b697b51d\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.196156 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgbbw\" (UniqueName: \"kubernetes.io/projected/2365ac26-e49a-4ab3-8781-20c1b697b51d-kube-api-access-cgbbw\") pod \"ironic-inspector-0\" (UID: \"2365ac26-e49a-4ab3-8781-20c1b697b51d\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.196231 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2365ac26-e49a-4ab3-8781-20c1b697b51d-public-tls-certs\") pod \"ironic-inspector-0\" (UID: \"2365ac26-e49a-4ab3-8781-20c1b697b51d\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.196434 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/2365ac26-e49a-4ab3-8781-20c1b697b51d-etc-podinfo\") pod \"ironic-inspector-0\" (UID: \"2365ac26-e49a-4ab3-8781-20c1b697b51d\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.196573 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2365ac26-e49a-4ab3-8781-20c1b697b51d-internal-tls-certs\") pod \"ironic-inspector-0\" (UID: \"2365ac26-e49a-4ab3-8781-20c1b697b51d\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.298625 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2365ac26-e49a-4ab3-8781-20c1b697b51d-combined-ca-bundle\") pod \"ironic-inspector-0\" (UID: \"2365ac26-e49a-4ab3-8781-20c1b697b51d\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.298689 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-ironic-inspector-dhcp-hostsdir\" (UniqueName: \"kubernetes.io/empty-dir/2365ac26-e49a-4ab3-8781-20c1b697b51d-var-lib-ironic-inspector-dhcp-hostsdir\") pod \"ironic-inspector-0\" (UID: \"2365ac26-e49a-4ab3-8781-20c1b697b51d\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.298722 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-ironic\" (UniqueName: \"kubernetes.io/empty-dir/2365ac26-e49a-4ab3-8781-20c1b697b51d-var-lib-ironic\") pod \"ironic-inspector-0\" (UID: \"2365ac26-e49a-4ab3-8781-20c1b697b51d\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.298747 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2365ac26-e49a-4ab3-8781-20c1b697b51d-scripts\") pod \"ironic-inspector-0\" (UID: \"2365ac26-e49a-4ab3-8781-20c1b697b51d\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.298768 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2365ac26-e49a-4ab3-8781-20c1b697b51d-config\") pod \"ironic-inspector-0\" (UID: \"2365ac26-e49a-4ab3-8781-20c1b697b51d\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.298836 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgbbw\" (UniqueName: \"kubernetes.io/projected/2365ac26-e49a-4ab3-8781-20c1b697b51d-kube-api-access-cgbbw\") pod \"ironic-inspector-0\" (UID: \"2365ac26-e49a-4ab3-8781-20c1b697b51d\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.298865 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2365ac26-e49a-4ab3-8781-20c1b697b51d-public-tls-certs\") pod \"ironic-inspector-0\" (UID: \"2365ac26-e49a-4ab3-8781-20c1b697b51d\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.299395 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-ironic-inspector-dhcp-hostsdir\" (UniqueName: \"kubernetes.io/empty-dir/2365ac26-e49a-4ab3-8781-20c1b697b51d-var-lib-ironic-inspector-dhcp-hostsdir\") pod \"ironic-inspector-0\" (UID: \"2365ac26-e49a-4ab3-8781-20c1b697b51d\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.299456 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/2365ac26-e49a-4ab3-8781-20c1b697b51d-etc-podinfo\") pod \"ironic-inspector-0\" (UID: \"2365ac26-e49a-4ab3-8781-20c1b697b51d\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.299252 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-ironic\" (UniqueName: \"kubernetes.io/empty-dir/2365ac26-e49a-4ab3-8781-20c1b697b51d-var-lib-ironic\") pod \"ironic-inspector-0\" (UID: \"2365ac26-e49a-4ab3-8781-20c1b697b51d\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.299497 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2365ac26-e49a-4ab3-8781-20c1b697b51d-internal-tls-certs\") pod \"ironic-inspector-0\" (UID: \"2365ac26-e49a-4ab3-8781-20c1b697b51d\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.307233 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2365ac26-e49a-4ab3-8781-20c1b697b51d-internal-tls-certs\") pod \"ironic-inspector-0\" (UID: \"2365ac26-e49a-4ab3-8781-20c1b697b51d\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.307270 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2365ac26-e49a-4ab3-8781-20c1b697b51d-combined-ca-bundle\") pod \"ironic-inspector-0\" (UID: \"2365ac26-e49a-4ab3-8781-20c1b697b51d\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.307612 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2365ac26-e49a-4ab3-8781-20c1b697b51d-public-tls-certs\") pod \"ironic-inspector-0\" (UID: \"2365ac26-e49a-4ab3-8781-20c1b697b51d\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.308074 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/2365ac26-e49a-4ab3-8781-20c1b697b51d-config\") pod \"ironic-inspector-0\" (UID: \"2365ac26-e49a-4ab3-8781-20c1b697b51d\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.310174 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2365ac26-e49a-4ab3-8781-20c1b697b51d-scripts\") pod \"ironic-inspector-0\" (UID: \"2365ac26-e49a-4ab3-8781-20c1b697b51d\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.311065 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-podinfo\" (UniqueName: \"kubernetes.io/downward-api/2365ac26-e49a-4ab3-8781-20c1b697b51d-etc-podinfo\") pod \"ironic-inspector-0\" (UID: \"2365ac26-e49a-4ab3-8781-20c1b697b51d\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.316297 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgbbw\" (UniqueName: \"kubernetes.io/projected/2365ac26-e49a-4ab3-8781-20c1b697b51d-kube-api-access-cgbbw\") pod \"ironic-inspector-0\" (UID: \"2365ac26-e49a-4ab3-8781-20c1b697b51d\") " pod="openstack/ironic-inspector-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.361348 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.361667 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.404889 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.404966 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.434680 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ironic-inspector-0" Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.993434 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 03 07:29:25 crc kubenswrapper[4708]: I0203 07:29:25.993494 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 03 07:29:26 crc kubenswrapper[4708]: I0203 07:29:26.107109 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b15f6fec-b538-480e-b5a6-644b39d68a59" path="/var/lib/kubelet/pods/b15f6fec-b538-480e-b5a6-644b39d68a59/volumes" Feb 03 07:29:26 crc kubenswrapper[4708]: I0203 07:29:26.118117 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 03 07:29:26 crc kubenswrapper[4708]: I0203 07:29:26.118162 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 03 07:29:26 crc kubenswrapper[4708]: I0203 07:29:26.149580 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 03 07:29:26 crc kubenswrapper[4708]: I0203 07:29:26.164655 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 03 07:29:27 crc kubenswrapper[4708]: I0203 07:29:27.003510 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 03 07:29:27 crc kubenswrapper[4708]: I0203 07:29:27.003888 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 03 07:29:27 crc kubenswrapper[4708]: I0203 07:29:27.917261 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 03 07:29:27 crc kubenswrapper[4708]: I0203 07:29:27.921691 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 03 07:29:28 crc kubenswrapper[4708]: I0203 07:29:28.935754 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 03 07:29:28 crc kubenswrapper[4708]: I0203 07:29:28.940643 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.014826 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.077123 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.077647 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1210a44-81ea-4f31-a263-219a2b36b92e","Type":"ContainerDied","Data":"d04547aedb663d66e55e398710964da340c260985640ff7918117a9927fa7bbc"} Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.077715 4708 scope.go:117] "RemoveContainer" containerID="b102a755355936bffb5efa60355d471e6e85a0bd7b6db9fdfe04a527bd19a8a9" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.173370 4708 scope.go:117] "RemoveContainer" containerID="54bbba2b07befa18370ec31387201631759267c88785ca533ecc209fca7ed35d" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.187181 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1210a44-81ea-4f31-a263-219a2b36b92e-run-httpd\") pod \"e1210a44-81ea-4f31-a263-219a2b36b92e\" (UID: \"e1210a44-81ea-4f31-a263-219a2b36b92e\") " Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.187250 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1210a44-81ea-4f31-a263-219a2b36b92e-scripts\") pod \"e1210a44-81ea-4f31-a263-219a2b36b92e\" (UID: \"e1210a44-81ea-4f31-a263-219a2b36b92e\") " Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.187269 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1210a44-81ea-4f31-a263-219a2b36b92e-log-httpd\") pod \"e1210a44-81ea-4f31-a263-219a2b36b92e\" (UID: \"e1210a44-81ea-4f31-a263-219a2b36b92e\") " Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.187301 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1210a44-81ea-4f31-a263-219a2b36b92e-config-data\") pod \"e1210a44-81ea-4f31-a263-219a2b36b92e\" (UID: \"e1210a44-81ea-4f31-a263-219a2b36b92e\") " Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.187376 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6kzgf\" (UniqueName: \"kubernetes.io/projected/e1210a44-81ea-4f31-a263-219a2b36b92e-kube-api-access-6kzgf\") pod \"e1210a44-81ea-4f31-a263-219a2b36b92e\" (UID: \"e1210a44-81ea-4f31-a263-219a2b36b92e\") " Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.187417 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1210a44-81ea-4f31-a263-219a2b36b92e-combined-ca-bundle\") pod \"e1210a44-81ea-4f31-a263-219a2b36b92e\" (UID: \"e1210a44-81ea-4f31-a263-219a2b36b92e\") " Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.187492 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e1210a44-81ea-4f31-a263-219a2b36b92e-sg-core-conf-yaml\") pod \"e1210a44-81ea-4f31-a263-219a2b36b92e\" (UID: \"e1210a44-81ea-4f31-a263-219a2b36b92e\") " Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.187711 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1210a44-81ea-4f31-a263-219a2b36b92e-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e1210a44-81ea-4f31-a263-219a2b36b92e" (UID: "e1210a44-81ea-4f31-a263-219a2b36b92e"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.187974 4708 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1210a44-81ea-4f31-a263-219a2b36b92e-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.190478 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1210a44-81ea-4f31-a263-219a2b36b92e-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e1210a44-81ea-4f31-a263-219a2b36b92e" (UID: "e1210a44-81ea-4f31-a263-219a2b36b92e"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.192011 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1210a44-81ea-4f31-a263-219a2b36b92e-scripts" (OuterVolumeSpecName: "scripts") pod "e1210a44-81ea-4f31-a263-219a2b36b92e" (UID: "e1210a44-81ea-4f31-a263-219a2b36b92e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.204071 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1210a44-81ea-4f31-a263-219a2b36b92e-kube-api-access-6kzgf" (OuterVolumeSpecName: "kube-api-access-6kzgf") pod "e1210a44-81ea-4f31-a263-219a2b36b92e" (UID: "e1210a44-81ea-4f31-a263-219a2b36b92e"). InnerVolumeSpecName "kube-api-access-6kzgf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.206688 4708 scope.go:117] "RemoveContainer" containerID="d13af06a7369cdd336b14f8dd4b87cf07029834be9499be55cdb6a89d4664991" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.254181 4708 scope.go:117] "RemoveContainer" containerID="f0d587bd6086a22a2103e5f41b0d489589e1fe13a1fee7356636bc60344ed733" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.260914 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1210a44-81ea-4f31-a263-219a2b36b92e-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e1210a44-81ea-4f31-a263-219a2b36b92e" (UID: "e1210a44-81ea-4f31-a263-219a2b36b92e"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.289329 4708 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e1210a44-81ea-4f31-a263-219a2b36b92e-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.289357 4708 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1210a44-81ea-4f31-a263-219a2b36b92e-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.289368 4708 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1210a44-81ea-4f31-a263-219a2b36b92e-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.289378 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6kzgf\" (UniqueName: \"kubernetes.io/projected/e1210a44-81ea-4f31-a263-219a2b36b92e-kube-api-access-6kzgf\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.441932 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1210a44-81ea-4f31-a263-219a2b36b92e-config-data" (OuterVolumeSpecName: "config-data") pod "e1210a44-81ea-4f31-a263-219a2b36b92e" (UID: "e1210a44-81ea-4f31-a263-219a2b36b92e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.502725 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1210a44-81ea-4f31-a263-219a2b36b92e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e1210a44-81ea-4f31-a263-219a2b36b92e" (UID: "e1210a44-81ea-4f31-a263-219a2b36b92e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.508213 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1210a44-81ea-4f31-a263-219a2b36b92e-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.508256 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1210a44-81ea-4f31-a263-219a2b36b92e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.609044 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ironic-inspector-0"] Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.739055 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.759446 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.765374 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:29:30 crc kubenswrapper[4708]: E0203 07:29:30.766483 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1210a44-81ea-4f31-a263-219a2b36b92e" containerName="ceilometer-notification-agent" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.766531 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1210a44-81ea-4f31-a263-219a2b36b92e" containerName="ceilometer-notification-agent" Feb 03 07:29:30 crc kubenswrapper[4708]: E0203 07:29:30.766546 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1210a44-81ea-4f31-a263-219a2b36b92e" containerName="sg-core" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.766552 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1210a44-81ea-4f31-a263-219a2b36b92e" containerName="sg-core" Feb 03 07:29:30 crc kubenswrapper[4708]: E0203 07:29:30.766565 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1210a44-81ea-4f31-a263-219a2b36b92e" containerName="ceilometer-central-agent" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.766571 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1210a44-81ea-4f31-a263-219a2b36b92e" containerName="ceilometer-central-agent" Feb 03 07:29:30 crc kubenswrapper[4708]: E0203 07:29:30.766625 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1210a44-81ea-4f31-a263-219a2b36b92e" containerName="proxy-httpd" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.766635 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1210a44-81ea-4f31-a263-219a2b36b92e" containerName="proxy-httpd" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.766890 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1210a44-81ea-4f31-a263-219a2b36b92e" containerName="ceilometer-notification-agent" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.766915 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1210a44-81ea-4f31-a263-219a2b36b92e" containerName="sg-core" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.766924 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1210a44-81ea-4f31-a263-219a2b36b92e" containerName="proxy-httpd" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.766939 4708 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="e1210a44-81ea-4f31-a263-219a2b36b92e" containerName="ceilometer-central-agent" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.770369 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.773110 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.773425 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.794334 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.917867 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b13517d-a5f6-4d5a-905d-79c0d0876c74-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\") " pod="openstack/ceilometer-0" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.917928 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8b13517d-a5f6-4d5a-905d-79c0d0876c74-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\") " pod="openstack/ceilometer-0" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.918137 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8b13517d-a5f6-4d5a-905d-79c0d0876c74-scripts\") pod \"ceilometer-0\" (UID: \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\") " pod="openstack/ceilometer-0" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.918264 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8b13517d-a5f6-4d5a-905d-79c0d0876c74-run-httpd\") pod \"ceilometer-0\" (UID: \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\") " pod="openstack/ceilometer-0" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.918336 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8b13517d-a5f6-4d5a-905d-79c0d0876c74-log-httpd\") pod \"ceilometer-0\" (UID: \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\") " pod="openstack/ceilometer-0" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.918377 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqhsr\" (UniqueName: \"kubernetes.io/projected/8b13517d-a5f6-4d5a-905d-79c0d0876c74-kube-api-access-dqhsr\") pod \"ceilometer-0\" (UID: \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\") " pod="openstack/ceilometer-0" Feb 03 07:29:30 crc kubenswrapper[4708]: I0203 07:29:30.918398 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b13517d-a5f6-4d5a-905d-79c0d0876c74-config-data\") pod \"ceilometer-0\" (UID: \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\") " pod="openstack/ceilometer-0" Feb 03 07:29:31 crc kubenswrapper[4708]: I0203 07:29:31.019932 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/8b13517d-a5f6-4d5a-905d-79c0d0876c74-run-httpd\") pod \"ceilometer-0\" (UID: \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\") " pod="openstack/ceilometer-0" Feb 03 07:29:31 crc kubenswrapper[4708]: I0203 07:29:31.020012 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8b13517d-a5f6-4d5a-905d-79c0d0876c74-log-httpd\") pod \"ceilometer-0\" (UID: \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\") " pod="openstack/ceilometer-0" Feb 03 07:29:31 crc kubenswrapper[4708]: I0203 07:29:31.020057 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b13517d-a5f6-4d5a-905d-79c0d0876c74-config-data\") pod \"ceilometer-0\" (UID: \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\") " pod="openstack/ceilometer-0" Feb 03 07:29:31 crc kubenswrapper[4708]: I0203 07:29:31.020081 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqhsr\" (UniqueName: \"kubernetes.io/projected/8b13517d-a5f6-4d5a-905d-79c0d0876c74-kube-api-access-dqhsr\") pod \"ceilometer-0\" (UID: \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\") " pod="openstack/ceilometer-0" Feb 03 07:29:31 crc kubenswrapper[4708]: I0203 07:29:31.020175 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b13517d-a5f6-4d5a-905d-79c0d0876c74-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\") " pod="openstack/ceilometer-0" Feb 03 07:29:31 crc kubenswrapper[4708]: I0203 07:29:31.020213 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8b13517d-a5f6-4d5a-905d-79c0d0876c74-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\") " pod="openstack/ceilometer-0" Feb 03 07:29:31 crc kubenswrapper[4708]: I0203 07:29:31.020253 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8b13517d-a5f6-4d5a-905d-79c0d0876c74-scripts\") pod \"ceilometer-0\" (UID: \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\") " pod="openstack/ceilometer-0" Feb 03 07:29:31 crc kubenswrapper[4708]: I0203 07:29:31.020482 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8b13517d-a5f6-4d5a-905d-79c0d0876c74-run-httpd\") pod \"ceilometer-0\" (UID: \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\") " pod="openstack/ceilometer-0" Feb 03 07:29:31 crc kubenswrapper[4708]: I0203 07:29:31.020989 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8b13517d-a5f6-4d5a-905d-79c0d0876c74-log-httpd\") pod \"ceilometer-0\" (UID: \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\") " pod="openstack/ceilometer-0" Feb 03 07:29:31 crc kubenswrapper[4708]: I0203 07:29:31.024997 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b13517d-a5f6-4d5a-905d-79c0d0876c74-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\") " pod="openstack/ceilometer-0" Feb 03 07:29:31 crc kubenswrapper[4708]: I0203 07:29:31.025103 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/8b13517d-a5f6-4d5a-905d-79c0d0876c74-scripts\") pod \"ceilometer-0\" (UID: \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\") " pod="openstack/ceilometer-0" Feb 03 07:29:31 crc kubenswrapper[4708]: I0203 07:29:31.026240 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b13517d-a5f6-4d5a-905d-79c0d0876c74-config-data\") pod \"ceilometer-0\" (UID: \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\") " pod="openstack/ceilometer-0" Feb 03 07:29:31 crc kubenswrapper[4708]: I0203 07:29:31.026918 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8b13517d-a5f6-4d5a-905d-79c0d0876c74-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\") " pod="openstack/ceilometer-0" Feb 03 07:29:31 crc kubenswrapper[4708]: I0203 07:29:31.038805 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqhsr\" (UniqueName: \"kubernetes.io/projected/8b13517d-a5f6-4d5a-905d-79c0d0876c74-kube-api-access-dqhsr\") pod \"ceilometer-0\" (UID: \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\") " pod="openstack/ceilometer-0" Feb 03 07:29:31 crc kubenswrapper[4708]: I0203 07:29:31.087900 4708 generic.go:334] "Generic (PLEG): container finished" podID="2365ac26-e49a-4ab3-8781-20c1b697b51d" containerID="069706bfba0de84fb0ed2f5bf6da5b34a9f1d3bf0614d10659f3a9b04311373d" exitCode=0 Feb 03 07:29:31 crc kubenswrapper[4708]: I0203 07:29:31.087955 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-0" event={"ID":"2365ac26-e49a-4ab3-8781-20c1b697b51d","Type":"ContainerDied","Data":"069706bfba0de84fb0ed2f5bf6da5b34a9f1d3bf0614d10659f3a9b04311373d"} Feb 03 07:29:31 crc kubenswrapper[4708]: I0203 07:29:31.088050 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-0" event={"ID":"2365ac26-e49a-4ab3-8781-20c1b697b51d","Type":"ContainerStarted","Data":"f2559760c354ba16ff79811d10c4dd41f79798438dc29279f915ff6fdc4d084a"} Feb 03 07:29:31 crc kubenswrapper[4708]: I0203 07:29:31.090629 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"db624ad8-1c0f-4100-b3a2-4c80e02c1b03","Type":"ContainerStarted","Data":"7b8bc4af184fd1f345e234ea71be7a715c4609b6fb546f70cefa41905daaf532"} Feb 03 07:29:31 crc kubenswrapper[4708]: I0203 07:29:31.096105 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-conductor-0" event={"ID":"361821ae-c957-4e31-bb9b-6d659aaceec4","Type":"ContainerStarted","Data":"ee78c5d38024a2b9c525a47d9b2d0808da6654c8e2a884b50ec46a9eff9cbb0a"} Feb 03 07:29:31 crc kubenswrapper[4708]: I0203 07:29:31.096583 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:29:31 crc kubenswrapper[4708]: I0203 07:29:31.157163 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.8668017150000003 podStartE2EDuration="41.157146793s" podCreationTimestamp="2026-02-03 07:28:50 +0000 UTC" firstStartedPulling="2026-02-03 07:28:51.740344212 +0000 UTC m=+1110.722291009" lastFinishedPulling="2026-02-03 07:29:30.03068927 +0000 UTC m=+1149.012636087" observedRunningTime="2026-02-03 07:29:31.14854923 +0000 UTC m=+1150.130496047" watchObservedRunningTime="2026-02-03 07:29:31.157146793 +0000 UTC m=+1150.139093590" Feb 03 07:29:31 crc kubenswrapper[4708]: I0203 07:29:31.648917 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:29:32 crc kubenswrapper[4708]: I0203 07:29:32.112120 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1210a44-81ea-4f31-a263-219a2b36b92e" path="/var/lib/kubelet/pods/e1210a44-81ea-4f31-a263-219a2b36b92e/volumes" Feb 03 07:29:32 crc kubenswrapper[4708]: I0203 07:29:32.120682 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8b13517d-a5f6-4d5a-905d-79c0d0876c74","Type":"ContainerStarted","Data":"bea406ec91f8290e7bd1e1deb521170eb1898b0e83ccbc678a13cbf96658838f"} Feb 03 07:29:32 crc kubenswrapper[4708]: I0203 07:29:32.126197 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-0" event={"ID":"2365ac26-e49a-4ab3-8781-20c1b697b51d","Type":"ContainerStarted","Data":"0781d029b8412ec128741b1afb8b02fdf8812fbd3465046ad6ff23d3c61e1eea"} Feb 03 07:29:33 crc kubenswrapper[4708]: I0203 07:29:33.139330 4708 generic.go:334] "Generic (PLEG): container finished" podID="2365ac26-e49a-4ab3-8781-20c1b697b51d" containerID="0781d029b8412ec128741b1afb8b02fdf8812fbd3465046ad6ff23d3c61e1eea" exitCode=0 Feb 03 07:29:33 crc kubenswrapper[4708]: I0203 07:29:33.139415 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-0" event={"ID":"2365ac26-e49a-4ab3-8781-20c1b697b51d","Type":"ContainerDied","Data":"0781d029b8412ec128741b1afb8b02fdf8812fbd3465046ad6ff23d3c61e1eea"} Feb 03 07:29:33 crc kubenswrapper[4708]: I0203 07:29:33.140026 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-0" event={"ID":"2365ac26-e49a-4ab3-8781-20c1b697b51d","Type":"ContainerStarted","Data":"6a4ce2f7f62b053fce5903a828fbacef088e189199d19f797526a2be433c892e"} Feb 03 07:29:33 crc kubenswrapper[4708]: I0203 07:29:33.142251 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8b13517d-a5f6-4d5a-905d-79c0d0876c74","Type":"ContainerStarted","Data":"1fc708ec3c98582da0dc149659d66e3eb94ba8c483012081e2af775d936bae83"} Feb 03 07:29:34 crc kubenswrapper[4708]: I0203 07:29:34.155192 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-0" event={"ID":"2365ac26-e49a-4ab3-8781-20c1b697b51d","Type":"ContainerStarted","Data":"990ae387329bba834b2992766e81cf36f78321a0389f9deccdd2ad050b07b26a"} Feb 03 07:29:37 crc kubenswrapper[4708]: I0203 07:29:37.193430 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-0" event={"ID":"2365ac26-e49a-4ab3-8781-20c1b697b51d","Type":"ContainerStarted","Data":"9a176a00a7cb85dfa4d699382353158cffd997d0dfb11326cffbb6ec129eae9f"} Feb 03 07:29:38 crc kubenswrapper[4708]: I0203 07:29:38.212510 4708 generic.go:334] "Generic (PLEG): 
container finished" podID="2365ac26-e49a-4ab3-8781-20c1b697b51d" containerID="990ae387329bba834b2992766e81cf36f78321a0389f9deccdd2ad050b07b26a" exitCode=0 Feb 03 07:29:38 crc kubenswrapper[4708]: I0203 07:29:38.212901 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-0" event={"ID":"2365ac26-e49a-4ab3-8781-20c1b697b51d","Type":"ContainerDied","Data":"990ae387329bba834b2992766e81cf36f78321a0389f9deccdd2ad050b07b26a"} Feb 03 07:29:39 crc kubenswrapper[4708]: I0203 07:29:39.826574 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:29:45 crc kubenswrapper[4708]: E0203 07:29:45.000103 4708 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified" Feb 03 07:29:45 crc kubenswrapper[4708]: E0203 07:29:45.000767 4708 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:nova-cell0-conductor-db-sync,Image:quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CELL_NAME,Value:cell0,ValueFrom:nil,},EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:false,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/kolla/config_files/config.json,SubPath:nova-conductor-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wf72l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42436,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-cell0-conductor-db-sync-bqd2z_openstack(5def47d5-3c2f-4cfb-acc1-63b2c12e5e98): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 03 07:29:45 crc kubenswrapper[4708]: E0203 07:29:45.002131 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" 
pod="openstack/nova-cell0-conductor-db-sync-bqd2z" podUID="5def47d5-3c2f-4cfb-acc1-63b2c12e5e98" Feb 03 07:29:45 crc kubenswrapper[4708]: E0203 07:29:45.279068 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified\\\"\"" pod="openstack/nova-cell0-conductor-db-sync-bqd2z" podUID="5def47d5-3c2f-4cfb-acc1-63b2c12e5e98" Feb 03 07:29:46 crc kubenswrapper[4708]: I0203 07:29:46.288832 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-0" event={"ID":"2365ac26-e49a-4ab3-8781-20c1b697b51d","Type":"ContainerStarted","Data":"1eda27228af0073e2f5ff94e1bb1a43603591146a202206fbe6648b77f575ad9"} Feb 03 07:29:46 crc kubenswrapper[4708]: I0203 07:29:46.289319 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ironic-inspector-0" Feb 03 07:29:46 crc kubenswrapper[4708]: I0203 07:29:46.289603 4708 scope.go:117] "RemoveContainer" containerID="990ae387329bba834b2992766e81cf36f78321a0389f9deccdd2ad050b07b26a" Feb 03 07:29:46 crc kubenswrapper[4708]: I0203 07:29:46.291691 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ironic-inspector-0" Feb 03 07:29:46 crc kubenswrapper[4708]: I0203 07:29:46.294565 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8b13517d-a5f6-4d5a-905d-79c0d0876c74","Type":"ContainerStarted","Data":"2757fe895b0a34ba438484c54d3347b91e54401cd85b01890087b2078589ad2e"} Feb 03 07:29:46 crc kubenswrapper[4708]: I0203 07:29:46.294600 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8b13517d-a5f6-4d5a-905d-79c0d0876c74","Type":"ContainerStarted","Data":"2626b3a8658c29586814ce0ba514a4248dbe2954330627eeec5804ce5789226f"} Feb 03 07:29:47 crc kubenswrapper[4708]: I0203 07:29:47.308128 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-inspector-0" event={"ID":"2365ac26-e49a-4ab3-8781-20c1b697b51d","Type":"ContainerStarted","Data":"49a5adda5e09d2381824a91bf7fb735381a1d270123dc0d9f9e88d8d2fb38d10"} Feb 03 07:29:47 crc kubenswrapper[4708]: I0203 07:29:47.344318 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ironic-inspector-0" podStartSLOduration=22.344298881 podStartE2EDuration="22.344298881s" podCreationTimestamp="2026-02-03 07:29:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:29:47.336626621 +0000 UTC m=+1166.318573428" watchObservedRunningTime="2026-02-03 07:29:47.344298881 +0000 UTC m=+1166.326245688" Feb 03 07:29:48 crc kubenswrapper[4708]: I0203 07:29:48.325334 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8b13517d-a5f6-4d5a-905d-79c0d0876c74","Type":"ContainerStarted","Data":"00d0682d23efcca3dc838d9c96a1c01fadee47be47e318937e6ce57541adafd0"} Feb 03 07:29:48 crc kubenswrapper[4708]: I0203 07:29:48.325474 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8b13517d-a5f6-4d5a-905d-79c0d0876c74" containerName="ceilometer-central-agent" containerID="cri-o://1fc708ec3c98582da0dc149659d66e3eb94ba8c483012081e2af775d936bae83" gracePeriod=30 Feb 03 07:29:48 crc kubenswrapper[4708]: I0203 07:29:48.325752 4708 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8b13517d-a5f6-4d5a-905d-79c0d0876c74" containerName="ceilometer-notification-agent" containerID="cri-o://2626b3a8658c29586814ce0ba514a4248dbe2954330627eeec5804ce5789226f" gracePeriod=30 Feb 03 07:29:48 crc kubenswrapper[4708]: I0203 07:29:48.325727 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8b13517d-a5f6-4d5a-905d-79c0d0876c74" containerName="sg-core" containerID="cri-o://2757fe895b0a34ba438484c54d3347b91e54401cd85b01890087b2078589ad2e" gracePeriod=30 Feb 03 07:29:48 crc kubenswrapper[4708]: I0203 07:29:48.325847 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 03 07:29:48 crc kubenswrapper[4708]: I0203 07:29:48.325970 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8b13517d-a5f6-4d5a-905d-79c0d0876c74" containerName="proxy-httpd" containerID="cri-o://00d0682d23efcca3dc838d9c96a1c01fadee47be47e318937e6ce57541adafd0" gracePeriod=30 Feb 03 07:29:48 crc kubenswrapper[4708]: I0203 07:29:48.363998 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.034505266 podStartE2EDuration="18.363977543s" podCreationTimestamp="2026-02-03 07:29:30 +0000 UTC" firstStartedPulling="2026-02-03 07:29:31.656764536 +0000 UTC m=+1150.638711353" lastFinishedPulling="2026-02-03 07:29:47.986236813 +0000 UTC m=+1166.968183630" observedRunningTime="2026-02-03 07:29:48.351638248 +0000 UTC m=+1167.333585055" watchObservedRunningTime="2026-02-03 07:29:48.363977543 +0000 UTC m=+1167.345924350" Feb 03 07:29:49 crc kubenswrapper[4708]: I0203 07:29:49.340251 4708 generic.go:334] "Generic (PLEG): container finished" podID="8b13517d-a5f6-4d5a-905d-79c0d0876c74" containerID="2757fe895b0a34ba438484c54d3347b91e54401cd85b01890087b2078589ad2e" exitCode=2 Feb 03 07:29:49 crc kubenswrapper[4708]: I0203 07:29:49.340281 4708 generic.go:334] "Generic (PLEG): container finished" podID="8b13517d-a5f6-4d5a-905d-79c0d0876c74" containerID="2626b3a8658c29586814ce0ba514a4248dbe2954330627eeec5804ce5789226f" exitCode=0 Feb 03 07:29:49 crc kubenswrapper[4708]: I0203 07:29:49.340288 4708 generic.go:334] "Generic (PLEG): container finished" podID="8b13517d-a5f6-4d5a-905d-79c0d0876c74" containerID="1fc708ec3c98582da0dc149659d66e3eb94ba8c483012081e2af775d936bae83" exitCode=0 Feb 03 07:29:49 crc kubenswrapper[4708]: I0203 07:29:49.340307 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8b13517d-a5f6-4d5a-905d-79c0d0876c74","Type":"ContainerDied","Data":"2757fe895b0a34ba438484c54d3347b91e54401cd85b01890087b2078589ad2e"} Feb 03 07:29:49 crc kubenswrapper[4708]: I0203 07:29:49.340333 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8b13517d-a5f6-4d5a-905d-79c0d0876c74","Type":"ContainerDied","Data":"2626b3a8658c29586814ce0ba514a4248dbe2954330627eeec5804ce5789226f"} Feb 03 07:29:49 crc kubenswrapper[4708]: I0203 07:29:49.340342 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8b13517d-a5f6-4d5a-905d-79c0d0876c74","Type":"ContainerDied","Data":"1fc708ec3c98582da0dc149659d66e3eb94ba8c483012081e2af775d936bae83"} Feb 03 07:29:50 crc kubenswrapper[4708]: I0203 07:29:50.435210 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/ironic-inspector-0" Feb 03 07:29:50 crc kubenswrapper[4708]: I0203 07:29:50.436590 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ironic-inspector-0" Feb 03 07:29:55 crc kubenswrapper[4708]: I0203 07:29:55.435313 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ironic-inspector-0" Feb 03 07:29:55 crc kubenswrapper[4708]: I0203 07:29:55.436224 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ironic-inspector-0" Feb 03 07:29:55 crc kubenswrapper[4708]: I0203 07:29:55.469231 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ironic-inspector-0" Feb 03 07:29:55 crc kubenswrapper[4708]: I0203 07:29:55.472855 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ironic-inspector-0" Feb 03 07:29:56 crc kubenswrapper[4708]: I0203 07:29:56.432874 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ironic-inspector-0" Feb 03 07:29:56 crc kubenswrapper[4708]: I0203 07:29:56.441142 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ironic-inspector-0" Feb 03 07:29:57 crc kubenswrapper[4708]: I0203 07:29:57.426842 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-bqd2z" event={"ID":"5def47d5-3c2f-4cfb-acc1-63b2c12e5e98","Type":"ContainerStarted","Data":"90f4dedde762dba325f889559509a4ef78df8e437581ea0d6e7ce7f143aab114"} Feb 03 07:29:57 crc kubenswrapper[4708]: I0203 07:29:57.452654 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-bqd2z" podStartSLOduration=1.8238900120000001 podStartE2EDuration="35.452628086s" podCreationTimestamp="2026-02-03 07:29:22 +0000 UTC" firstStartedPulling="2026-02-03 07:29:22.94051621 +0000 UTC m=+1141.922463017" lastFinishedPulling="2026-02-03 07:29:56.569254284 +0000 UTC m=+1175.551201091" observedRunningTime="2026-02-03 07:29:57.441400479 +0000 UTC m=+1176.423347296" watchObservedRunningTime="2026-02-03 07:29:57.452628086 +0000 UTC m=+1176.434574903" Feb 03 07:30:00 crc kubenswrapper[4708]: I0203 07:30:00.152127 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501730-g9clw"] Feb 03 07:30:00 crc kubenswrapper[4708]: I0203 07:30:00.154347 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-g9clw" Feb 03 07:30:00 crc kubenswrapper[4708]: I0203 07:30:00.156651 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 03 07:30:00 crc kubenswrapper[4708]: I0203 07:30:00.157398 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 03 07:30:00 crc kubenswrapper[4708]: I0203 07:30:00.172589 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501730-g9clw"] Feb 03 07:30:00 crc kubenswrapper[4708]: I0203 07:30:00.243860 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/72f8a2f7-6047-41a6-9628-d51a5192e27f-secret-volume\") pod \"collect-profiles-29501730-g9clw\" (UID: \"72f8a2f7-6047-41a6-9628-d51a5192e27f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-g9clw" Feb 03 07:30:00 crc kubenswrapper[4708]: I0203 07:30:00.244040 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jc8tw\" (UniqueName: \"kubernetes.io/projected/72f8a2f7-6047-41a6-9628-d51a5192e27f-kube-api-access-jc8tw\") pod \"collect-profiles-29501730-g9clw\" (UID: \"72f8a2f7-6047-41a6-9628-d51a5192e27f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-g9clw" Feb 03 07:30:00 crc kubenswrapper[4708]: I0203 07:30:00.244132 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/72f8a2f7-6047-41a6-9628-d51a5192e27f-config-volume\") pod \"collect-profiles-29501730-g9clw\" (UID: \"72f8a2f7-6047-41a6-9628-d51a5192e27f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-g9clw" Feb 03 07:30:00 crc kubenswrapper[4708]: I0203 07:30:00.346074 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jc8tw\" (UniqueName: \"kubernetes.io/projected/72f8a2f7-6047-41a6-9628-d51a5192e27f-kube-api-access-jc8tw\") pod \"collect-profiles-29501730-g9clw\" (UID: \"72f8a2f7-6047-41a6-9628-d51a5192e27f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-g9clw" Feb 03 07:30:00 crc kubenswrapper[4708]: I0203 07:30:00.346159 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/72f8a2f7-6047-41a6-9628-d51a5192e27f-config-volume\") pod \"collect-profiles-29501730-g9clw\" (UID: \"72f8a2f7-6047-41a6-9628-d51a5192e27f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-g9clw" Feb 03 07:30:00 crc kubenswrapper[4708]: I0203 07:30:00.346215 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/72f8a2f7-6047-41a6-9628-d51a5192e27f-secret-volume\") pod \"collect-profiles-29501730-g9clw\" (UID: \"72f8a2f7-6047-41a6-9628-d51a5192e27f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-g9clw" Feb 03 07:30:00 crc kubenswrapper[4708]: I0203 07:30:00.347821 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/72f8a2f7-6047-41a6-9628-d51a5192e27f-config-volume\") pod 
\"collect-profiles-29501730-g9clw\" (UID: \"72f8a2f7-6047-41a6-9628-d51a5192e27f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-g9clw" Feb 03 07:30:00 crc kubenswrapper[4708]: I0203 07:30:00.353197 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/72f8a2f7-6047-41a6-9628-d51a5192e27f-secret-volume\") pod \"collect-profiles-29501730-g9clw\" (UID: \"72f8a2f7-6047-41a6-9628-d51a5192e27f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-g9clw" Feb 03 07:30:00 crc kubenswrapper[4708]: I0203 07:30:00.371751 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jc8tw\" (UniqueName: \"kubernetes.io/projected/72f8a2f7-6047-41a6-9628-d51a5192e27f-kube-api-access-jc8tw\") pod \"collect-profiles-29501730-g9clw\" (UID: \"72f8a2f7-6047-41a6-9628-d51a5192e27f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-g9clw" Feb 03 07:30:00 crc kubenswrapper[4708]: I0203 07:30:00.473951 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-g9clw" Feb 03 07:30:00 crc kubenswrapper[4708]: I0203 07:30:00.966000 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501730-g9clw"] Feb 03 07:30:01 crc kubenswrapper[4708]: I0203 07:30:01.102171 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="8b13517d-a5f6-4d5a-905d-79c0d0876c74" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Feb 03 07:30:01 crc kubenswrapper[4708]: I0203 07:30:01.467825 4708 generic.go:334] "Generic (PLEG): container finished" podID="72f8a2f7-6047-41a6-9628-d51a5192e27f" containerID="ff10c95b1919ba953821a2db1fcd6fd2920315373868d148006569c93947df60" exitCode=0 Feb 03 07:30:01 crc kubenswrapper[4708]: I0203 07:30:01.468095 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-g9clw" event={"ID":"72f8a2f7-6047-41a6-9628-d51a5192e27f","Type":"ContainerDied","Data":"ff10c95b1919ba953821a2db1fcd6fd2920315373868d148006569c93947df60"} Feb 03 07:30:01 crc kubenswrapper[4708]: I0203 07:30:01.468122 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-g9clw" event={"ID":"72f8a2f7-6047-41a6-9628-d51a5192e27f","Type":"ContainerStarted","Data":"80dcb75967bcaf296c5b783dbe90c09327725870ad7c1e7180529aa3d17e2c1a"} Feb 03 07:30:02 crc kubenswrapper[4708]: I0203 07:30:02.826811 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-g9clw" Feb 03 07:30:02 crc kubenswrapper[4708]: I0203 07:30:02.892749 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/72f8a2f7-6047-41a6-9628-d51a5192e27f-config-volume\") pod \"72f8a2f7-6047-41a6-9628-d51a5192e27f\" (UID: \"72f8a2f7-6047-41a6-9628-d51a5192e27f\") " Feb 03 07:30:02 crc kubenswrapper[4708]: I0203 07:30:02.893043 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/72f8a2f7-6047-41a6-9628-d51a5192e27f-secret-volume\") pod \"72f8a2f7-6047-41a6-9628-d51a5192e27f\" (UID: \"72f8a2f7-6047-41a6-9628-d51a5192e27f\") " Feb 03 07:30:02 crc kubenswrapper[4708]: I0203 07:30:02.893164 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jc8tw\" (UniqueName: \"kubernetes.io/projected/72f8a2f7-6047-41a6-9628-d51a5192e27f-kube-api-access-jc8tw\") pod \"72f8a2f7-6047-41a6-9628-d51a5192e27f\" (UID: \"72f8a2f7-6047-41a6-9628-d51a5192e27f\") " Feb 03 07:30:02 crc kubenswrapper[4708]: I0203 07:30:02.893957 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72f8a2f7-6047-41a6-9628-d51a5192e27f-config-volume" (OuterVolumeSpecName: "config-volume") pod "72f8a2f7-6047-41a6-9628-d51a5192e27f" (UID: "72f8a2f7-6047-41a6-9628-d51a5192e27f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:30:02 crc kubenswrapper[4708]: I0203 07:30:02.899637 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72f8a2f7-6047-41a6-9628-d51a5192e27f-kube-api-access-jc8tw" (OuterVolumeSpecName: "kube-api-access-jc8tw") pod "72f8a2f7-6047-41a6-9628-d51a5192e27f" (UID: "72f8a2f7-6047-41a6-9628-d51a5192e27f"). InnerVolumeSpecName "kube-api-access-jc8tw". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:30:02 crc kubenswrapper[4708]: I0203 07:30:02.908631 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72f8a2f7-6047-41a6-9628-d51a5192e27f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "72f8a2f7-6047-41a6-9628-d51a5192e27f" (UID: "72f8a2f7-6047-41a6-9628-d51a5192e27f"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:30:02 crc kubenswrapper[4708]: I0203 07:30:02.995392 4708 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/72f8a2f7-6047-41a6-9628-d51a5192e27f-config-volume\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:02 crc kubenswrapper[4708]: I0203 07:30:02.995436 4708 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/72f8a2f7-6047-41a6-9628-d51a5192e27f-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:02 crc kubenswrapper[4708]: I0203 07:30:02.995450 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jc8tw\" (UniqueName: \"kubernetes.io/projected/72f8a2f7-6047-41a6-9628-d51a5192e27f-kube-api-access-jc8tw\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:03 crc kubenswrapper[4708]: I0203 07:30:03.486575 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-g9clw" event={"ID":"72f8a2f7-6047-41a6-9628-d51a5192e27f","Type":"ContainerDied","Data":"80dcb75967bcaf296c5b783dbe90c09327725870ad7c1e7180529aa3d17e2c1a"} Feb 03 07:30:03 crc kubenswrapper[4708]: I0203 07:30:03.486616 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="80dcb75967bcaf296c5b783dbe90c09327725870ad7c1e7180529aa3d17e2c1a" Feb 03 07:30:03 crc kubenswrapper[4708]: I0203 07:30:03.486655 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501730-g9clw" Feb 03 07:30:08 crc kubenswrapper[4708]: I0203 07:30:08.548840 4708 generic.go:334] "Generic (PLEG): container finished" podID="5def47d5-3c2f-4cfb-acc1-63b2c12e5e98" containerID="90f4dedde762dba325f889559509a4ef78df8e437581ea0d6e7ce7f143aab114" exitCode=0 Feb 03 07:30:08 crc kubenswrapper[4708]: I0203 07:30:08.548941 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-bqd2z" event={"ID":"5def47d5-3c2f-4cfb-acc1-63b2c12e5e98","Type":"ContainerDied","Data":"90f4dedde762dba325f889559509a4ef78df8e437581ea0d6e7ce7f143aab114"} Feb 03 07:30:09 crc kubenswrapper[4708]: I0203 07:30:09.926931 4708 util.go:48] "No ready sandbox for pod can be found. 
Feb 03 07:30:09 crc kubenswrapper[4708]: I0203 07:30:09.926931 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-bqd2z"
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.039899 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5def47d5-3c2f-4cfb-acc1-63b2c12e5e98-config-data\") pod \"5def47d5-3c2f-4cfb-acc1-63b2c12e5e98\" (UID: \"5def47d5-3c2f-4cfb-acc1-63b2c12e5e98\") "
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.040088 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5def47d5-3c2f-4cfb-acc1-63b2c12e5e98-combined-ca-bundle\") pod \"5def47d5-3c2f-4cfb-acc1-63b2c12e5e98\" (UID: \"5def47d5-3c2f-4cfb-acc1-63b2c12e5e98\") "
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.040257 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wf72l\" (UniqueName: \"kubernetes.io/projected/5def47d5-3c2f-4cfb-acc1-63b2c12e5e98-kube-api-access-wf72l\") pod \"5def47d5-3c2f-4cfb-acc1-63b2c12e5e98\" (UID: \"5def47d5-3c2f-4cfb-acc1-63b2c12e5e98\") "
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.040336 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5def47d5-3c2f-4cfb-acc1-63b2c12e5e98-scripts\") pod \"5def47d5-3c2f-4cfb-acc1-63b2c12e5e98\" (UID: \"5def47d5-3c2f-4cfb-acc1-63b2c12e5e98\") "
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.045601 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5def47d5-3c2f-4cfb-acc1-63b2c12e5e98-scripts" (OuterVolumeSpecName: "scripts") pod "5def47d5-3c2f-4cfb-acc1-63b2c12e5e98" (UID: "5def47d5-3c2f-4cfb-acc1-63b2c12e5e98"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.047069 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5def47d5-3c2f-4cfb-acc1-63b2c12e5e98-kube-api-access-wf72l" (OuterVolumeSpecName: "kube-api-access-wf72l") pod "5def47d5-3c2f-4cfb-acc1-63b2c12e5e98" (UID: "5def47d5-3c2f-4cfb-acc1-63b2c12e5e98"). InnerVolumeSpecName "kube-api-access-wf72l". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.069130 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5def47d5-3c2f-4cfb-acc1-63b2c12e5e98-config-data" (OuterVolumeSpecName: "config-data") pod "5def47d5-3c2f-4cfb-acc1-63b2c12e5e98" (UID: "5def47d5-3c2f-4cfb-acc1-63b2c12e5e98"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.090835 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5def47d5-3c2f-4cfb-acc1-63b2c12e5e98-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5def47d5-3c2f-4cfb-acc1-63b2c12e5e98" (UID: "5def47d5-3c2f-4cfb-acc1-63b2c12e5e98"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.142895 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5def47d5-3c2f-4cfb-acc1-63b2c12e5e98-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.142927 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wf72l\" (UniqueName: \"kubernetes.io/projected/5def47d5-3c2f-4cfb-acc1-63b2c12e5e98-kube-api-access-wf72l\") on node \"crc\" DevicePath \"\""
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.142940 4708 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5def47d5-3c2f-4cfb-acc1-63b2c12e5e98-scripts\") on node \"crc\" DevicePath \"\""
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.142951 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5def47d5-3c2f-4cfb-acc1-63b2c12e5e98-config-data\") on node \"crc\" DevicePath \"\""
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.580824 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-bqd2z" event={"ID":"5def47d5-3c2f-4cfb-acc1-63b2c12e5e98","Type":"ContainerDied","Data":"c0d4da568eb16436287223207ef7bae90ba57c965f2880a908bd6f86a4a11331"}
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.581145 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c0d4da568eb16436287223207ef7bae90ba57c965f2880a908bd6f86a4a11331"
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.580939 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-bqd2z"
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.720909 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"]
Feb 03 07:30:10 crc kubenswrapper[4708]: E0203 07:30:10.721486 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72f8a2f7-6047-41a6-9628-d51a5192e27f" containerName="collect-profiles"
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.721517 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="72f8a2f7-6047-41a6-9628-d51a5192e27f" containerName="collect-profiles"
Feb 03 07:30:10 crc kubenswrapper[4708]: E0203 07:30:10.721542 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5def47d5-3c2f-4cfb-acc1-63b2c12e5e98" containerName="nova-cell0-conductor-db-sync"
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.721552 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="5def47d5-3c2f-4cfb-acc1-63b2c12e5e98" containerName="nova-cell0-conductor-db-sync"
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.721841 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="72f8a2f7-6047-41a6-9628-d51a5192e27f" containerName="collect-profiles"
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.721886 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="5def47d5-3c2f-4cfb-acc1-63b2c12e5e98" containerName="nova-cell0-conductor-db-sync"
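The teardown records above show the reconciler's fixed ordering for pod volume cleanup: "operationExecutor.UnmountVolume started" (reconciler_common.go:159), then "UnmountVolume.TearDown succeeded" (operation_generator.go:803), and only after that "Volume detached ... DevicePath \"\"" (reconciler_common.go:293). A minimal Go sketch of that ordering follows; the types and function names are hypothetical illustrations, not kubelet source.

package main

import "log"

// volume is a hypothetical stand-in for the kubelet's per-volume state.
type volume struct {
	name   string // e.g. "config-data"
	plugin string // e.g. "kubernetes.io/secret"
}

// tearDownPodVolumes mirrors the ordering visible in the log records above:
// each volume is unmounted first, and a volume is only reported as detached
// (DevicePath "") after its TearDown has succeeded.
func tearDownPodVolumes(podUID string, vols []volume) {
	for _, v := range vols {
		log.Printf("operationExecutor.UnmountVolume started for volume %q pod %q", v.name, podUID)
		// ... plugin-specific unmount work would happen here ...
		log.Printf("UnmountVolume.TearDown succeeded for volume %q (plugin %s)", v.name, v.plugin)
	}
	for _, v := range vols {
		log.Printf("Volume detached for volume %q on node \"crc\" DevicePath \"\"", v.name)
	}
}

func main() {
	// Volume set taken from the nova-cell0-conductor-db-sync-bqd2z teardown above.
	tearDownPodVolumes("5def47d5-3c2f-4cfb-acc1-63b2c12e5e98", []volume{
		{"scripts", "kubernetes.io/secret"},
		{"kube-api-access-wf72l", "kubernetes.io/projected"},
		{"config-data", "kubernetes.io/secret"},
		{"combined-ca-bundle", "kubernetes.io/secret"},
	})
}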
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.725037 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.727227 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-cz69x"
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.727627 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.749916 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.857102 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58ec9777-9aec-4d92-a3a5-6266f6288046-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"58ec9777-9aec-4d92-a3a5-6266f6288046\") " pod="openstack/nova-cell0-conductor-0"
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.857526 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ec9777-9aec-4d92-a3a5-6266f6288046-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"58ec9777-9aec-4d92-a3a5-6266f6288046\") " pod="openstack/nova-cell0-conductor-0"
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.857752 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7pndt\" (UniqueName: \"kubernetes.io/projected/58ec9777-9aec-4d92-a3a5-6266f6288046-kube-api-access-7pndt\") pod \"nova-cell0-conductor-0\" (UID: \"58ec9777-9aec-4d92-a3a5-6266f6288046\") " pod="openstack/nova-cell0-conductor-0"
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.960317 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7pndt\" (UniqueName: \"kubernetes.io/projected/58ec9777-9aec-4d92-a3a5-6266f6288046-kube-api-access-7pndt\") pod \"nova-cell0-conductor-0\" (UID: \"58ec9777-9aec-4d92-a3a5-6266f6288046\") " pod="openstack/nova-cell0-conductor-0"
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.960491 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58ec9777-9aec-4d92-a3a5-6266f6288046-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"58ec9777-9aec-4d92-a3a5-6266f6288046\") " pod="openstack/nova-cell0-conductor-0"
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.960525 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ec9777-9aec-4d92-a3a5-6266f6288046-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"58ec9777-9aec-4d92-a3a5-6266f6288046\") " pod="openstack/nova-cell0-conductor-0"
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.965704 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58ec9777-9aec-4d92-a3a5-6266f6288046-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"58ec9777-9aec-4d92-a3a5-6266f6288046\") " pod="openstack/nova-cell0-conductor-0"
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.975686 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ec9777-9aec-4d92-a3a5-6266f6288046-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"58ec9777-9aec-4d92-a3a5-6266f6288046\") " pod="openstack/nova-cell0-conductor-0"
Feb 03 07:30:10 crc kubenswrapper[4708]: I0203 07:30:10.980433 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7pndt\" (UniqueName: \"kubernetes.io/projected/58ec9777-9aec-4d92-a3a5-6266f6288046-kube-api-access-7pndt\") pod \"nova-cell0-conductor-0\" (UID: \"58ec9777-9aec-4d92-a3a5-6266f6288046\") " pod="openstack/nova-cell0-conductor-0"
Feb 03 07:30:11 crc kubenswrapper[4708]: I0203 07:30:11.046714 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Feb 03 07:30:11 crc kubenswrapper[4708]: I0203 07:30:11.489946 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Feb 03 07:30:11 crc kubenswrapper[4708]: I0203 07:30:11.590159 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"58ec9777-9aec-4d92-a3a5-6266f6288046","Type":"ContainerStarted","Data":"f14b6dfcb273c4bd5c5e70feea198902ddc4cd94f8644ecccc7f6a7c6fc0a16a"}
Feb 03 07:30:12 crc kubenswrapper[4708]: I0203 07:30:12.599188 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"58ec9777-9aec-4d92-a3a5-6266f6288046","Type":"ContainerStarted","Data":"d5be9b35abe052ea47d82870b4b9de2e64fea5647d6f4a91ff853f0d853267cf"}
Feb 03 07:30:12 crc kubenswrapper[4708]: I0203 07:30:12.599557 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0"
Feb 03 07:30:12 crc kubenswrapper[4708]: I0203 07:30:12.620184 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.620166995 podStartE2EDuration="2.620166995s" podCreationTimestamp="2026-02-03 07:30:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:30:12.614449094 +0000 UTC m=+1191.596395891" watchObservedRunningTime="2026-02-03 07:30:12.620166995 +0000 UTC m=+1191.602113792"
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.077104 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0"
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.577082 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-6gqmd"]
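The pod_startup_latency_tracker.go:104 record above reports podStartSLOduration=2.620166995 for nova-cell0-conductor-0, which is just watchObservedRunningTime minus podCreationTimestamp; the zero-valued firstStartedPulling/lastFinishedPulling timestamps mean no image-pull time is subtracted. A worked check in Go, assuming only the timestamp layout printed in these records:

package main

import (
	"fmt"
	"time"
)

// Layout matching the timestamps printed in the record above,
// e.g. "2026-02-03 07:30:12.620166995 +0000 UTC".
const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

func main() {
	created, _ := time.Parse(layout, "2026-02-03 07:30:10 +0000 UTC")
	running, _ := time.Parse(layout, "2026-02-03 07:30:12.620166995 +0000 UTC")

	// With the pull timestamps at the zero value, the SLO duration is
	// simply observed-running-at-watch minus creation.
	fmt.Println(running.Sub(created)) // 2.620166995s
}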
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.579262 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-6gqmd"
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.581783 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data"
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.584779 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts"
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.609222 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-6gqmd"]
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.650514 4708 generic.go:334] "Generic (PLEG): container finished" podID="361821ae-c957-4e31-bb9b-6d659aaceec4" containerID="ee78c5d38024a2b9c525a47d9b2d0808da6654c8e2a884b50ec46a9eff9cbb0a" exitCode=0
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.650571 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-conductor-0" event={"ID":"361821ae-c957-4e31-bb9b-6d659aaceec4","Type":"ContainerDied","Data":"ee78c5d38024a2b9c525a47d9b2d0808da6654c8e2a884b50ec46a9eff9cbb0a"}
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.676980 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djhzb\" (UniqueName: \"kubernetes.io/projected/741b999e-f896-4960-8f9c-e9b4aade9039-kube-api-access-djhzb\") pod \"nova-cell0-cell-mapping-6gqmd\" (UID: \"741b999e-f896-4960-8f9c-e9b4aade9039\") " pod="openstack/nova-cell0-cell-mapping-6gqmd"
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.677050 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/741b999e-f896-4960-8f9c-e9b4aade9039-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-6gqmd\" (UID: \"741b999e-f896-4960-8f9c-e9b4aade9039\") " pod="openstack/nova-cell0-cell-mapping-6gqmd"
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.677083 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/741b999e-f896-4960-8f9c-e9b4aade9039-scripts\") pod \"nova-cell0-cell-mapping-6gqmd\" (UID: \"741b999e-f896-4960-8f9c-e9b4aade9039\") " pod="openstack/nova-cell0-cell-mapping-6gqmd"
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.677166 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/741b999e-f896-4960-8f9c-e9b4aade9039-config-data\") pod \"nova-cell0-cell-mapping-6gqmd\" (UID: \"741b999e-f896-4960-8f9c-e9b4aade9039\") " pod="openstack/nova-cell0-cell-mapping-6gqmd"
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.758447 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.759640 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.767696 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.779367 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/741b999e-f896-4960-8f9c-e9b4aade9039-config-data\") pod \"nova-cell0-cell-mapping-6gqmd\" (UID: \"741b999e-f896-4960-8f9c-e9b4aade9039\") " pod="openstack/nova-cell0-cell-mapping-6gqmd"
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.779451 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djhzb\" (UniqueName: \"kubernetes.io/projected/741b999e-f896-4960-8f9c-e9b4aade9039-kube-api-access-djhzb\") pod \"nova-cell0-cell-mapping-6gqmd\" (UID: \"741b999e-f896-4960-8f9c-e9b4aade9039\") " pod="openstack/nova-cell0-cell-mapping-6gqmd"
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.779505 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/741b999e-f896-4960-8f9c-e9b4aade9039-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-6gqmd\" (UID: \"741b999e-f896-4960-8f9c-e9b4aade9039\") " pod="openstack/nova-cell0-cell-mapping-6gqmd"
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.779528 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/741b999e-f896-4960-8f9c-e9b4aade9039-scripts\") pod \"nova-cell0-cell-mapping-6gqmd\" (UID: \"741b999e-f896-4960-8f9c-e9b4aade9039\") " pod="openstack/nova-cell0-cell-mapping-6gqmd"
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.782391 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.784358 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/741b999e-f896-4960-8f9c-e9b4aade9039-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-6gqmd\" (UID: \"741b999e-f896-4960-8f9c-e9b4aade9039\") " pod="openstack/nova-cell0-cell-mapping-6gqmd"
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.794535 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/741b999e-f896-4960-8f9c-e9b4aade9039-scripts\") pod \"nova-cell0-cell-mapping-6gqmd\" (UID: \"741b999e-f896-4960-8f9c-e9b4aade9039\") " pod="openstack/nova-cell0-cell-mapping-6gqmd"
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.797610 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/741b999e-f896-4960-8f9c-e9b4aade9039-config-data\") pod \"nova-cell0-cell-mapping-6gqmd\" (UID: \"741b999e-f896-4960-8f9c-e9b4aade9039\") " pod="openstack/nova-cell0-cell-mapping-6gqmd"
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.830122 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.832159 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.841471 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.847448 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.851405 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djhzb\" (UniqueName: \"kubernetes.io/projected/741b999e-f896-4960-8f9c-e9b4aade9039-kube-api-access-djhzb\") pod \"nova-cell0-cell-mapping-6gqmd\" (UID: \"741b999e-f896-4960-8f9c-e9b4aade9039\") " pod="openstack/nova-cell0-cell-mapping-6gqmd"
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.886820 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqwpf\" (UniqueName: \"kubernetes.io/projected/fdc4c642-cd6a-4104-9594-647a3356296a-kube-api-access-hqwpf\") pod \"nova-cell1-novncproxy-0\" (UID: \"fdc4c642-cd6a-4104-9594-647a3356296a\") " pod="openstack/nova-cell1-novncproxy-0"
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.886881 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdc4c642-cd6a-4104-9594-647a3356296a-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"fdc4c642-cd6a-4104-9594-647a3356296a\") " pod="openstack/nova-cell1-novncproxy-0"
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.887034 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fdc4c642-cd6a-4104-9594-647a3356296a-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"fdc4c642-cd6a-4104-9594-647a3356296a\") " pod="openstack/nova-cell1-novncproxy-0"
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.920855 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-6gqmd"
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.953155 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.954673 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.964549 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Feb 03 07:30:16 crc kubenswrapper[4708]: I0203 07:30:16.990085 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.004541 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58a6c72b-c4c2-4f2e-9a56-7a94427a10c8-config-data\") pod \"nova-scheduler-0\" (UID: \"58a6c72b-c4c2-4f2e-9a56-7a94427a10c8\") " pod="openstack/nova-scheduler-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.004633 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fdc4c642-cd6a-4104-9594-647a3356296a-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"fdc4c642-cd6a-4104-9594-647a3356296a\") " pod="openstack/nova-cell1-novncproxy-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.004671 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfcjr\" (UniqueName: \"kubernetes.io/projected/58a6c72b-c4c2-4f2e-9a56-7a94427a10c8-kube-api-access-mfcjr\") pod \"nova-scheduler-0\" (UID: \"58a6c72b-c4c2-4f2e-9a56-7a94427a10c8\") " pod="openstack/nova-scheduler-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.004741 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqwpf\" (UniqueName: \"kubernetes.io/projected/fdc4c642-cd6a-4104-9594-647a3356296a-kube-api-access-hqwpf\") pod \"nova-cell1-novncproxy-0\" (UID: \"fdc4c642-cd6a-4104-9594-647a3356296a\") " pod="openstack/nova-cell1-novncproxy-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.004770 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58a6c72b-c4c2-4f2e-9a56-7a94427a10c8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"58a6c72b-c4c2-4f2e-9a56-7a94427a10c8\") " pod="openstack/nova-scheduler-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.004806 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdc4c642-cd6a-4104-9594-647a3356296a-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"fdc4c642-cd6a-4104-9594-647a3356296a\") " pod="openstack/nova-cell1-novncproxy-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.013474 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdc4c642-cd6a-4104-9594-647a3356296a-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"fdc4c642-cd6a-4104-9594-647a3356296a\") " pod="openstack/nova-cell1-novncproxy-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.013971 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.019128 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.022237 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.026674 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fdc4c642-cd6a-4104-9594-647a3356296a-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"fdc4c642-cd6a-4104-9594-647a3356296a\") " pod="openstack/nova-cell1-novncproxy-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.108348 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58a6c72b-c4c2-4f2e-9a56-7a94427a10c8-config-data\") pod \"nova-scheduler-0\" (UID: \"58a6c72b-c4c2-4f2e-9a56-7a94427a10c8\") " pod="openstack/nova-scheduler-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.112391 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ncxx\" (UniqueName: \"kubernetes.io/projected/fcae63fb-2971-4a50-a678-1d3d3a598a4b-kube-api-access-6ncxx\") pod \"nova-api-0\" (UID: \"fcae63fb-2971-4a50-a678-1d3d3a598a4b\") " pod="openstack/nova-api-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.112675 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcae63fb-2971-4a50-a678-1d3d3a598a4b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"fcae63fb-2971-4a50-a678-1d3d3a598a4b\") " pod="openstack/nova-api-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.112857 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfcjr\" (UniqueName: \"kubernetes.io/projected/58a6c72b-c4c2-4f2e-9a56-7a94427a10c8-kube-api-access-mfcjr\") pod \"nova-scheduler-0\" (UID: \"58a6c72b-c4c2-4f2e-9a56-7a94427a10c8\") " pod="openstack/nova-scheduler-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.113006 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcae63fb-2971-4a50-a678-1d3d3a598a4b-config-data\") pod \"nova-api-0\" (UID: \"fcae63fb-2971-4a50-a678-1d3d3a598a4b\") " pod="openstack/nova-api-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.113134 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fcae63fb-2971-4a50-a678-1d3d3a598a4b-logs\") pod \"nova-api-0\" (UID: \"fcae63fb-2971-4a50-a678-1d3d3a598a4b\") " pod="openstack/nova-api-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.113215 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72179ea9-93e3-4e20-907f-bcf317cd4f80-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"72179ea9-93e3-4e20-907f-bcf317cd4f80\") " pod="openstack/nova-metadata-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.113292 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6shb5\" (UniqueName: \"kubernetes.io/projected/72179ea9-93e3-4e20-907f-bcf317cd4f80-kube-api-access-6shb5\") pod \"nova-metadata-0\" (UID: \"72179ea9-93e3-4e20-907f-bcf317cd4f80\") " pod="openstack/nova-metadata-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.113437 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72179ea9-93e3-4e20-907f-bcf317cd4f80-config-data\") pod \"nova-metadata-0\" (UID: \"72179ea9-93e3-4e20-907f-bcf317cd4f80\") " pod="openstack/nova-metadata-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.113608 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72179ea9-93e3-4e20-907f-bcf317cd4f80-logs\") pod \"nova-metadata-0\" (UID: \"72179ea9-93e3-4e20-907f-bcf317cd4f80\") " pod="openstack/nova-metadata-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.113694 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58a6c72b-c4c2-4f2e-9a56-7a94427a10c8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"58a6c72b-c4c2-4f2e-9a56-7a94427a10c8\") " pod="openstack/nova-scheduler-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.120530 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58a6c72b-c4c2-4f2e-9a56-7a94427a10c8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"58a6c72b-c4c2-4f2e-9a56-7a94427a10c8\") " pod="openstack/nova-scheduler-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.122862 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqwpf\" (UniqueName: \"kubernetes.io/projected/fdc4c642-cd6a-4104-9594-647a3356296a-kube-api-access-hqwpf\") pod \"nova-cell1-novncproxy-0\" (UID: \"fdc4c642-cd6a-4104-9594-647a3356296a\") " pod="openstack/nova-cell1-novncproxy-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.134324 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58a6c72b-c4c2-4f2e-9a56-7a94427a10c8-config-data\") pod \"nova-scheduler-0\" (UID: \"58a6c72b-c4c2-4f2e-9a56-7a94427a10c8\") " pod="openstack/nova-scheduler-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.142057 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mfcjr\" (UniqueName: \"kubernetes.io/projected/58a6c72b-c4c2-4f2e-9a56-7a94427a10c8-kube-api-access-mfcjr\") pod \"nova-scheduler-0\" (UID: \"58a6c72b-c4c2-4f2e-9a56-7a94427a10c8\") " pod="openstack/nova-scheduler-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.149774 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.214929 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-rzctf"]
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.219860 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-rzctf"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.220634 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.221032 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ncxx\" (UniqueName: \"kubernetes.io/projected/fcae63fb-2971-4a50-a678-1d3d3a598a4b-kube-api-access-6ncxx\") pod \"nova-api-0\" (UID: \"fcae63fb-2971-4a50-a678-1d3d3a598a4b\") " pod="openstack/nova-api-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.221417 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcae63fb-2971-4a50-a678-1d3d3a598a4b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"fcae63fb-2971-4a50-a678-1d3d3a598a4b\") " pod="openstack/nova-api-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.221491 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcae63fb-2971-4a50-a678-1d3d3a598a4b-config-data\") pod \"nova-api-0\" (UID: \"fcae63fb-2971-4a50-a678-1d3d3a598a4b\") " pod="openstack/nova-api-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.221530 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fcae63fb-2971-4a50-a678-1d3d3a598a4b-logs\") pod \"nova-api-0\" (UID: \"fcae63fb-2971-4a50-a678-1d3d3a598a4b\") " pod="openstack/nova-api-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.221552 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72179ea9-93e3-4e20-907f-bcf317cd4f80-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"72179ea9-93e3-4e20-907f-bcf317cd4f80\") " pod="openstack/nova-metadata-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.221575 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6shb5\" (UniqueName: \"kubernetes.io/projected/72179ea9-93e3-4e20-907f-bcf317cd4f80-kube-api-access-6shb5\") pod \"nova-metadata-0\" (UID: \"72179ea9-93e3-4e20-907f-bcf317cd4f80\") " pod="openstack/nova-metadata-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.221621 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72179ea9-93e3-4e20-907f-bcf317cd4f80-config-data\") pod \"nova-metadata-0\" (UID: \"72179ea9-93e3-4e20-907f-bcf317cd4f80\") " pod="openstack/nova-metadata-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.221660 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72179ea9-93e3-4e20-907f-bcf317cd4f80-logs\") pod \"nova-metadata-0\" (UID: \"72179ea9-93e3-4e20-907f-bcf317cd4f80\") " pod="openstack/nova-metadata-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.222010 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fcae63fb-2971-4a50-a678-1d3d3a598a4b-logs\") pod \"nova-api-0\" (UID: \"fcae63fb-2971-4a50-a678-1d3d3a598a4b\") " pod="openstack/nova-api-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.222468 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72179ea9-93e3-4e20-907f-bcf317cd4f80-logs\") pod \"nova-metadata-0\" (UID: \"72179ea9-93e3-4e20-907f-bcf317cd4f80\") " pod="openstack/nova-metadata-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.225216 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.227312 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72179ea9-93e3-4e20-907f-bcf317cd4f80-config-data\") pod \"nova-metadata-0\" (UID: \"72179ea9-93e3-4e20-907f-bcf317cd4f80\") " pod="openstack/nova-metadata-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.227325 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcae63fb-2971-4a50-a678-1d3d3a598a4b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"fcae63fb-2971-4a50-a678-1d3d3a598a4b\") " pod="openstack/nova-api-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.237359 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72179ea9-93e3-4e20-907f-bcf317cd4f80-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"72179ea9-93e3-4e20-907f-bcf317cd4f80\") " pod="openstack/nova-metadata-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.246745 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcae63fb-2971-4a50-a678-1d3d3a598a4b-config-data\") pod \"nova-api-0\" (UID: \"fcae63fb-2971-4a50-a678-1d3d3a598a4b\") " pod="openstack/nova-api-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.247279 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6shb5\" (UniqueName: \"kubernetes.io/projected/72179ea9-93e3-4e20-907f-bcf317cd4f80-kube-api-access-6shb5\") pod \"nova-metadata-0\" (UID: \"72179ea9-93e3-4e20-907f-bcf317cd4f80\") " pod="openstack/nova-metadata-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.249138 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ncxx\" (UniqueName: \"kubernetes.io/projected/fcae63fb-2971-4a50-a678-1d3d3a598a4b-kube-api-access-6ncxx\") pod \"nova-api-0\" (UID: \"fcae63fb-2971-4a50-a678-1d3d3a598a4b\") " pod="openstack/nova-api-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.258914 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-rzctf"]
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.286305 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.323712 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7bbff83-97e5-41e1-8445-0c7deba26930-config\") pod \"dnsmasq-dns-757b4f8459-rzctf\" (UID: \"e7bbff83-97e5-41e1-8445-0c7deba26930\") " pod="openstack/dnsmasq-dns-757b4f8459-rzctf"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.323776 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e7bbff83-97e5-41e1-8445-0c7deba26930-ovsdbserver-sb\") pod \"dnsmasq-dns-757b4f8459-rzctf\" (UID: \"e7bbff83-97e5-41e1-8445-0c7deba26930\") " pod="openstack/dnsmasq-dns-757b4f8459-rzctf"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.323893 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e7bbff83-97e5-41e1-8445-0c7deba26930-dns-swift-storage-0\") pod \"dnsmasq-dns-757b4f8459-rzctf\" (UID: \"e7bbff83-97e5-41e1-8445-0c7deba26930\") " pod="openstack/dnsmasq-dns-757b4f8459-rzctf"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.323926 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e7bbff83-97e5-41e1-8445-0c7deba26930-ovsdbserver-nb\") pod \"dnsmasq-dns-757b4f8459-rzctf\" (UID: \"e7bbff83-97e5-41e1-8445-0c7deba26930\") " pod="openstack/dnsmasq-dns-757b4f8459-rzctf"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.323942 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkxg2\" (UniqueName: \"kubernetes.io/projected/e7bbff83-97e5-41e1-8445-0c7deba26930-kube-api-access-zkxg2\") pod \"dnsmasq-dns-757b4f8459-rzctf\" (UID: \"e7bbff83-97e5-41e1-8445-0c7deba26930\") " pod="openstack/dnsmasq-dns-757b4f8459-rzctf"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.324160 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e7bbff83-97e5-41e1-8445-0c7deba26930-dns-svc\") pod \"dnsmasq-dns-757b4f8459-rzctf\" (UID: \"e7bbff83-97e5-41e1-8445-0c7deba26930\") " pod="openstack/dnsmasq-dns-757b4f8459-rzctf"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.403220 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.426306 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e7bbff83-97e5-41e1-8445-0c7deba26930-dns-swift-storage-0\") pod \"dnsmasq-dns-757b4f8459-rzctf\" (UID: \"e7bbff83-97e5-41e1-8445-0c7deba26930\") " pod="openstack/dnsmasq-dns-757b4f8459-rzctf"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.426353 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e7bbff83-97e5-41e1-8445-0c7deba26930-ovsdbserver-nb\") pod \"dnsmasq-dns-757b4f8459-rzctf\" (UID: \"e7bbff83-97e5-41e1-8445-0c7deba26930\") " pod="openstack/dnsmasq-dns-757b4f8459-rzctf"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.426367 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkxg2\" (UniqueName: \"kubernetes.io/projected/e7bbff83-97e5-41e1-8445-0c7deba26930-kube-api-access-zkxg2\") pod \"dnsmasq-dns-757b4f8459-rzctf\" (UID: \"e7bbff83-97e5-41e1-8445-0c7deba26930\") " pod="openstack/dnsmasq-dns-757b4f8459-rzctf"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.426392 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e7bbff83-97e5-41e1-8445-0c7deba26930-dns-svc\") pod \"dnsmasq-dns-757b4f8459-rzctf\" (UID: \"e7bbff83-97e5-41e1-8445-0c7deba26930\") " pod="openstack/dnsmasq-dns-757b4f8459-rzctf"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.426478 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7bbff83-97e5-41e1-8445-0c7deba26930-config\") pod \"dnsmasq-dns-757b4f8459-rzctf\" (UID: \"e7bbff83-97e5-41e1-8445-0c7deba26930\") " pod="openstack/dnsmasq-dns-757b4f8459-rzctf"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.426509 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e7bbff83-97e5-41e1-8445-0c7deba26930-ovsdbserver-sb\") pod \"dnsmasq-dns-757b4f8459-rzctf\" (UID: \"e7bbff83-97e5-41e1-8445-0c7deba26930\") " pod="openstack/dnsmasq-dns-757b4f8459-rzctf"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.427488 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e7bbff83-97e5-41e1-8445-0c7deba26930-ovsdbserver-sb\") pod \"dnsmasq-dns-757b4f8459-rzctf\" (UID: \"e7bbff83-97e5-41e1-8445-0c7deba26930\") " pod="openstack/dnsmasq-dns-757b4f8459-rzctf"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.428059 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e7bbff83-97e5-41e1-8445-0c7deba26930-dns-swift-storage-0\") pod \"dnsmasq-dns-757b4f8459-rzctf\" (UID: \"e7bbff83-97e5-41e1-8445-0c7deba26930\") " pod="openstack/dnsmasq-dns-757b4f8459-rzctf"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.428657 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7bbff83-97e5-41e1-8445-0c7deba26930-config\") pod \"dnsmasq-dns-757b4f8459-rzctf\" (UID: \"e7bbff83-97e5-41e1-8445-0c7deba26930\") " pod="openstack/dnsmasq-dns-757b4f8459-rzctf"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.428864 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e7bbff83-97e5-41e1-8445-0c7deba26930-dns-svc\") pod \"dnsmasq-dns-757b4f8459-rzctf\" (UID: \"e7bbff83-97e5-41e1-8445-0c7deba26930\") " pod="openstack/dnsmasq-dns-757b4f8459-rzctf"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.429182 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e7bbff83-97e5-41e1-8445-0c7deba26930-ovsdbserver-nb\") pod \"dnsmasq-dns-757b4f8459-rzctf\" (UID: \"e7bbff83-97e5-41e1-8445-0c7deba26930\") " pod="openstack/dnsmasq-dns-757b4f8459-rzctf"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.461992 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkxg2\" (UniqueName: \"kubernetes.io/projected/e7bbff83-97e5-41e1-8445-0c7deba26930-kube-api-access-zkxg2\") pod \"dnsmasq-dns-757b4f8459-rzctf\" (UID: \"e7bbff83-97e5-41e1-8445-0c7deba26930\") " pod="openstack/dnsmasq-dns-757b4f8459-rzctf"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.557392 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-rzctf"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.592621 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-6gqmd"]
Feb 03 07:30:17 crc kubenswrapper[4708]: W0203 07:30:17.610894 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod741b999e_f896_4960_8f9c_e9b4aade9039.slice/crio-4c9b0f80bf62e731a4324184edea3d523ed79e89d59ffa44a9a4e97190f98e99 WatchSource:0}: Error finding container 4c9b0f80bf62e731a4324184edea3d523ed79e89d59ffa44a9a4e97190f98e99: Status 404 returned error can't find the container with id 4c9b0f80bf62e731a4324184edea3d523ed79e89d59ffa44a9a4e97190f98e99
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.646773 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-5wzvm"]
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.651009 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-5wzvm"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.665668 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.666207 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.710520 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-5wzvm"]
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.730278 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.732945 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65d33071-644f-4642-a6b3-ee141d7d6360-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-5wzvm\" (UID: \"65d33071-644f-4642-a6b3-ee141d7d6360\") " pod="openstack/nova-cell1-conductor-db-sync-5wzvm"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.733054 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65d33071-644f-4642-a6b3-ee141d7d6360-scripts\") pod \"nova-cell1-conductor-db-sync-5wzvm\" (UID: \"65d33071-644f-4642-a6b3-ee141d7d6360\") " pod="openstack/nova-cell1-conductor-db-sync-5wzvm"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.733426 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65d33071-644f-4642-a6b3-ee141d7d6360-config-data\") pod \"nova-cell1-conductor-db-sync-5wzvm\" (UID: \"65d33071-644f-4642-a6b3-ee141d7d6360\") " pod="openstack/nova-cell1-conductor-db-sync-5wzvm"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.733482 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twtcz\" (UniqueName: \"kubernetes.io/projected/65d33071-644f-4642-a6b3-ee141d7d6360-kube-api-access-twtcz\") pod \"nova-cell1-conductor-db-sync-5wzvm\" (UID: \"65d33071-644f-4642-a6b3-ee141d7d6360\") " pod="openstack/nova-cell1-conductor-db-sync-5wzvm"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.742896 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-6gqmd" event={"ID":"741b999e-f896-4960-8f9c-e9b4aade9039","Type":"ContainerStarted","Data":"4c9b0f80bf62e731a4324184edea3d523ed79e89d59ffa44a9a4e97190f98e99"}
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.783642 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.843851 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65d33071-644f-4642-a6b3-ee141d7d6360-config-data\") pod \"nova-cell1-conductor-db-sync-5wzvm\" (UID: \"65d33071-644f-4642-a6b3-ee141d7d6360\") " pod="openstack/nova-cell1-conductor-db-sync-5wzvm"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.843941 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twtcz\" (UniqueName: \"kubernetes.io/projected/65d33071-644f-4642-a6b3-ee141d7d6360-kube-api-access-twtcz\") pod \"nova-cell1-conductor-db-sync-5wzvm\" (UID: \"65d33071-644f-4642-a6b3-ee141d7d6360\") " pod="openstack/nova-cell1-conductor-db-sync-5wzvm"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.844006 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65d33071-644f-4642-a6b3-ee141d7d6360-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-5wzvm\" (UID: \"65d33071-644f-4642-a6b3-ee141d7d6360\") " pod="openstack/nova-cell1-conductor-db-sync-5wzvm"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.844107 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65d33071-644f-4642-a6b3-ee141d7d6360-scripts\") pod \"nova-cell1-conductor-db-sync-5wzvm\" (UID: \"65d33071-644f-4642-a6b3-ee141d7d6360\") " pod="openstack/nova-cell1-conductor-db-sync-5wzvm"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.848288 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65d33071-644f-4642-a6b3-ee141d7d6360-scripts\") pod \"nova-cell1-conductor-db-sync-5wzvm\" (UID: \"65d33071-644f-4642-a6b3-ee141d7d6360\") " pod="openstack/nova-cell1-conductor-db-sync-5wzvm"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.849758 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65d33071-644f-4642-a6b3-ee141d7d6360-config-data\") pod \"nova-cell1-conductor-db-sync-5wzvm\" (UID: \"65d33071-644f-4642-a6b3-ee141d7d6360\") " pod="openstack/nova-cell1-conductor-db-sync-5wzvm"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.851027 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65d33071-644f-4642-a6b3-ee141d7d6360-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-5wzvm\" (UID: \"65d33071-644f-4642-a6b3-ee141d7d6360\") " pod="openstack/nova-cell1-conductor-db-sync-5wzvm"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.864351 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twtcz\" (UniqueName: \"kubernetes.io/projected/65d33071-644f-4642-a6b3-ee141d7d6360-kube-api-access-twtcz\") pod \"nova-cell1-conductor-db-sync-5wzvm\" (UID: \"65d33071-644f-4642-a6b3-ee141d7d6360\") " pod="openstack/nova-cell1-conductor-db-sync-5wzvm"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.924472 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-5wzvm"
Feb 03 07:30:17 crc kubenswrapper[4708]: I0203 07:30:17.955338 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Feb 03 07:30:17 crc kubenswrapper[4708]: W0203 07:30:17.967234 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfcae63fb_2971_4a50_a678_1d3d3a598a4b.slice/crio-c35d53302845791c8cae82f4227e4c09df44121cb733066280dc0b1d706cfa26 WatchSource:0}: Error finding container c35d53302845791c8cae82f4227e4c09df44121cb733066280dc0b1d706cfa26: Status 404 returned error can't find the container with id c35d53302845791c8cae82f4227e4c09df44121cb733066280dc0b1d706cfa26
Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.162502 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.170554 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-rzctf"]
Feb 03 07:30:18 crc kubenswrapper[4708]: W0203 07:30:18.186579 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod72179ea9_93e3_4e20_907f_bcf317cd4f80.slice/crio-8b91bdda83f099f5be1141379c1f18a232f82be1870a0902aa1ec810e43f3d6a WatchSource:0}: Error finding container 8b91bdda83f099f5be1141379c1f18a232f82be1870a0902aa1ec810e43f3d6a: Status 404 returned error can't find the container with id 8b91bdda83f099f5be1141379c1f18a232f82be1870a0902aa1ec810e43f3d6a
Feb 03 07:30:18 crc kubenswrapper[4708]: W0203 07:30:18.195010 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode7bbff83_97e5_41e1_8445_0c7deba26930.slice/crio-9f17c28e9e2749c7f93f995de47274949d70a5447a62578d655124ffbcbd3fcf WatchSource:0}: Error finding container 9f17c28e9e2749c7f93f995de47274949d70a5447a62578d655124ffbcbd3fcf: Status 404 returned error can't find the container with id 9f17c28e9e2749c7f93f995de47274949d70a5447a62578d655124ffbcbd3fcf
Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.415785 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-5wzvm"]
Feb 03 07:30:18 crc kubenswrapper[4708]: W0203 07:30:18.416714 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod65d33071_644f_4642_a6b3_ee141d7d6360.slice/crio-c0e66d2ad53ef995036101afee62d782e661757d68f167c29f735496382dc838 WatchSource:0}: Error finding container c0e66d2ad53ef995036101afee62d782e661757d68f167c29f735496382dc838: Status 404 returned error can't find the container with id c0e66d2ad53ef995036101afee62d782e661757d68f167c29f735496382dc838
Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.766781 4708 generic.go:334] "Generic (PLEG): container finished" podID="e7bbff83-97e5-41e1-8445-0c7deba26930" containerID="a65b58de2d691eab1ed582c8e2cd81187a4496310d82c0d8cfec7bd83b364503" exitCode=0
Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.767210 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-rzctf" event={"ID":"e7bbff83-97e5-41e1-8445-0c7deba26930","Type":"ContainerDied","Data":"a65b58de2d691eab1ed582c8e2cd81187a4496310d82c0d8cfec7bd83b364503"}
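The W ... manager.go:1169 "Status 404" warnings above are a create-time race: the cgroup watch reports a new crio-<id> directory before the runtime can answer a lookup for that container ID, and the matching ContainerStarted events for the same IDs follow shortly after, so the warnings are transient. A hedged Go sketch of tolerating that kind of race follows; the lookup function and error type are hypothetical, not cAdvisor or kubelet source.

package main

import (
	"errors"
	"fmt"
	"time"
)

// errNotFound is a hypothetical stand-in for the runtime's
// "can't find the container with id ..." (HTTP 404) response.
var errNotFound = errors.New("container not found")

// lookupContainer is a placeholder for querying the runtime by ID;
// here the first probe is made to fail to model the race seen above.
func lookupContainer(id string, attempt int) error {
	if attempt == 0 {
		return errNotFound
	}
	return nil
}

// handleWatchEvent treats a not-found result as transient: the cgroup
// watch can fire before the runtime has registered the container.
func handleWatchEvent(id string) error {
	for attempt := 0; attempt < 3; attempt++ {
		err := lookupContainer(id, attempt)
		if err == nil {
			fmt.Printf("container %s found on attempt %d\n", id, attempt+1)
			return nil
		}
		if !errors.Is(err, errNotFound) {
			return err // a real failure, give up
		}
		time.Sleep(10 * time.Millisecond) // transient: retry shortly
	}
	return errNotFound
}

func main() {
	_ = handleWatchEvent("4c9b0f80bf62e731a4324184edea3d523ed79e89d59ffa44a9a4e97190f98e99")
}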
pod" pod="openstack/dnsmasq-dns-757b4f8459-rzctf" event={"ID":"e7bbff83-97e5-41e1-8445-0c7deba26930","Type":"ContainerStarted","Data":"9f17c28e9e2749c7f93f995de47274949d70a5447a62578d655124ffbcbd3fcf"} Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.777248 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-5wzvm" event={"ID":"65d33071-644f-4642-a6b3-ee141d7d6360","Type":"ContainerStarted","Data":"63f582bf7f5d415f591a3d0afd8cae298bcaca5ffc6726f98dedc2945f289bad"} Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.777313 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-5wzvm" event={"ID":"65d33071-644f-4642-a6b3-ee141d7d6360","Type":"ContainerStarted","Data":"c0e66d2ad53ef995036101afee62d782e661757d68f167c29f735496382dc838"} Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.783174 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"58a6c72b-c4c2-4f2e-9a56-7a94427a10c8","Type":"ContainerStarted","Data":"8e7942062f4f5440205d37fcfa661722e1cc50a44e48bf5f92f8f1750096aaeb"} Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.798352 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-6gqmd" event={"ID":"741b999e-f896-4960-8f9c-e9b4aade9039","Type":"ContainerStarted","Data":"a610cfdfca03aa0f3bcd8b6afc1f403d4b5f731f7deeac468e62aec0f615bfdf"} Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.812482 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.813256 4708 generic.go:334] "Generic (PLEG): container finished" podID="8b13517d-a5f6-4d5a-905d-79c0d0876c74" containerID="00d0682d23efcca3dc838d9c96a1c01fadee47be47e318937e6ce57541adafd0" exitCode=137 Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.813328 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8b13517d-a5f6-4d5a-905d-79c0d0876c74","Type":"ContainerDied","Data":"00d0682d23efcca3dc838d9c96a1c01fadee47be47e318937e6ce57541adafd0"} Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.813355 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8b13517d-a5f6-4d5a-905d-79c0d0876c74","Type":"ContainerDied","Data":"bea406ec91f8290e7bd1e1deb521170eb1898b0e83ccbc678a13cbf96658838f"} Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.813372 4708 scope.go:117] "RemoveContainer" containerID="00d0682d23efcca3dc838d9c96a1c01fadee47be47e318937e6ce57541adafd0" Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.826597 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fcae63fb-2971-4a50-a678-1d3d3a598a4b","Type":"ContainerStarted","Data":"c35d53302845791c8cae82f4227e4c09df44121cb733066280dc0b1d706cfa26"} Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.829545 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"fdc4c642-cd6a-4104-9594-647a3356296a","Type":"ContainerStarted","Data":"e0e37e52b8201c837926d023ce9f34c8b4a1de744d4f6c3ef3f64d5231e0b4df"} Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.830862 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-5wzvm" podStartSLOduration=1.830838309 podStartE2EDuration="1.830838309s" 
podCreationTimestamp="2026-02-03 07:30:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:30:18.801499463 +0000 UTC m=+1197.783446260" watchObservedRunningTime="2026-02-03 07:30:18.830838309 +0000 UTC m=+1197.812785116" Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.840257 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-conductor-0" event={"ID":"361821ae-c957-4e31-bb9b-6d659aaceec4","Type":"ContainerStarted","Data":"68c12dd7a6589d420a9bb85bed821813118bc8b315eb179ef244e14645927a10"} Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.840325 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-conductor-0" event={"ID":"361821ae-c957-4e31-bb9b-6d659aaceec4","Type":"ContainerStarted","Data":"7366bd6d525312748a6f960119826c4e070d397cee12d5b1687c990bafb19e33"} Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.842460 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-6gqmd" podStartSLOduration=2.842440325 podStartE2EDuration="2.842440325s" podCreationTimestamp="2026-02-03 07:30:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:30:18.813523391 +0000 UTC m=+1197.795470198" watchObservedRunningTime="2026-02-03 07:30:18.842440325 +0000 UTC m=+1197.824387122" Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.860856 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"72179ea9-93e3-4e20-907f-bcf317cd4f80","Type":"ContainerStarted","Data":"8b91bdda83f099f5be1141379c1f18a232f82be1870a0902aa1ec810e43f3d6a"} Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.866204 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8b13517d-a5f6-4d5a-905d-79c0d0876c74-sg-core-conf-yaml\") pod \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\" (UID: \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\") " Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.866254 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8b13517d-a5f6-4d5a-905d-79c0d0876c74-run-httpd\") pod \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\" (UID: \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\") " Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.866326 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8b13517d-a5f6-4d5a-905d-79c0d0876c74-scripts\") pod \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\" (UID: \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\") " Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.866401 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dqhsr\" (UniqueName: \"kubernetes.io/projected/8b13517d-a5f6-4d5a-905d-79c0d0876c74-kube-api-access-dqhsr\") pod \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\" (UID: \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\") " Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.866442 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8b13517d-a5f6-4d5a-905d-79c0d0876c74-log-httpd\") pod \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\" (UID: \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\") " 
Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.866582 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b13517d-a5f6-4d5a-905d-79c0d0876c74-combined-ca-bundle\") pod \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\" (UID: \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\") " Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.866604 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b13517d-a5f6-4d5a-905d-79c0d0876c74-config-data\") pod \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\" (UID: \"8b13517d-a5f6-4d5a-905d-79c0d0876c74\") " Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.872098 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b13517d-a5f6-4d5a-905d-79c0d0876c74-scripts" (OuterVolumeSpecName: "scripts") pod "8b13517d-a5f6-4d5a-905d-79c0d0876c74" (UID: "8b13517d-a5f6-4d5a-905d-79c0d0876c74"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.872491 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b13517d-a5f6-4d5a-905d-79c0d0876c74-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "8b13517d-a5f6-4d5a-905d-79c0d0876c74" (UID: "8b13517d-a5f6-4d5a-905d-79c0d0876c74"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.873251 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b13517d-a5f6-4d5a-905d-79c0d0876c74-kube-api-access-dqhsr" (OuterVolumeSpecName: "kube-api-access-dqhsr") pod "8b13517d-a5f6-4d5a-905d-79c0d0876c74" (UID: "8b13517d-a5f6-4d5a-905d-79c0d0876c74"). InnerVolumeSpecName "kube-api-access-dqhsr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.873325 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b13517d-a5f6-4d5a-905d-79c0d0876c74-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "8b13517d-a5f6-4d5a-905d-79c0d0876c74" (UID: "8b13517d-a5f6-4d5a-905d-79c0d0876c74"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.956329 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b13517d-a5f6-4d5a-905d-79c0d0876c74-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "8b13517d-a5f6-4d5a-905d-79c0d0876c74" (UID: "8b13517d-a5f6-4d5a-905d-79c0d0876c74"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.970020 4708 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8b13517d-a5f6-4d5a-905d-79c0d0876c74-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.970053 4708 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8b13517d-a5f6-4d5a-905d-79c0d0876c74-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.970063 4708 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8b13517d-a5f6-4d5a-905d-79c0d0876c74-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.970071 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dqhsr\" (UniqueName: \"kubernetes.io/projected/8b13517d-a5f6-4d5a-905d-79c0d0876c74-kube-api-access-dqhsr\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.970080 4708 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8b13517d-a5f6-4d5a-905d-79c0d0876c74-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:18 crc kubenswrapper[4708]: I0203 07:30:18.989036 4708 scope.go:117] "RemoveContainer" containerID="2757fe895b0a34ba438484c54d3347b91e54401cd85b01890087b2078589ad2e" Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.018234 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b13517d-a5f6-4d5a-905d-79c0d0876c74-config-data" (OuterVolumeSpecName: "config-data") pod "8b13517d-a5f6-4d5a-905d-79c0d0876c74" (UID: "8b13517d-a5f6-4d5a-905d-79c0d0876c74"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.041449 4708 scope.go:117] "RemoveContainer" containerID="2626b3a8658c29586814ce0ba514a4248dbe2954330627eeec5804ce5789226f" Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.052533 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b13517d-a5f6-4d5a-905d-79c0d0876c74-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8b13517d-a5f6-4d5a-905d-79c0d0876c74" (UID: "8b13517d-a5f6-4d5a-905d-79c0d0876c74"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.071482 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b13517d-a5f6-4d5a-905d-79c0d0876c74-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.071511 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b13517d-a5f6-4d5a-905d-79c0d0876c74-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.081189 4708 scope.go:117] "RemoveContainer" containerID="1fc708ec3c98582da0dc149659d66e3eb94ba8c483012081e2af775d936bae83" Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.116418 4708 scope.go:117] "RemoveContainer" containerID="00d0682d23efcca3dc838d9c96a1c01fadee47be47e318937e6ce57541adafd0" Feb 03 07:30:19 crc kubenswrapper[4708]: E0203 07:30:19.116878 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00d0682d23efcca3dc838d9c96a1c01fadee47be47e318937e6ce57541adafd0\": container with ID starting with 00d0682d23efcca3dc838d9c96a1c01fadee47be47e318937e6ce57541adafd0 not found: ID does not exist" containerID="00d0682d23efcca3dc838d9c96a1c01fadee47be47e318937e6ce57541adafd0" Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.116901 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00d0682d23efcca3dc838d9c96a1c01fadee47be47e318937e6ce57541adafd0"} err="failed to get container status \"00d0682d23efcca3dc838d9c96a1c01fadee47be47e318937e6ce57541adafd0\": rpc error: code = NotFound desc = could not find container \"00d0682d23efcca3dc838d9c96a1c01fadee47be47e318937e6ce57541adafd0\": container with ID starting with 00d0682d23efcca3dc838d9c96a1c01fadee47be47e318937e6ce57541adafd0 not found: ID does not exist" Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.116919 4708 scope.go:117] "RemoveContainer" containerID="2757fe895b0a34ba438484c54d3347b91e54401cd85b01890087b2078589ad2e" Feb 03 07:30:19 crc kubenswrapper[4708]: E0203 07:30:19.117518 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2757fe895b0a34ba438484c54d3347b91e54401cd85b01890087b2078589ad2e\": container with ID starting with 2757fe895b0a34ba438484c54d3347b91e54401cd85b01890087b2078589ad2e not found: ID does not exist" containerID="2757fe895b0a34ba438484c54d3347b91e54401cd85b01890087b2078589ad2e" Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.117575 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2757fe895b0a34ba438484c54d3347b91e54401cd85b01890087b2078589ad2e"} err="failed to get container status \"2757fe895b0a34ba438484c54d3347b91e54401cd85b01890087b2078589ad2e\": rpc error: code = NotFound desc = could not find container \"2757fe895b0a34ba438484c54d3347b91e54401cd85b01890087b2078589ad2e\": container with ID starting with 2757fe895b0a34ba438484c54d3347b91e54401cd85b01890087b2078589ad2e not found: ID does not exist" Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.117602 4708 scope.go:117] "RemoveContainer" containerID="2626b3a8658c29586814ce0ba514a4248dbe2954330627eeec5804ce5789226f" Feb 03 07:30:19 crc kubenswrapper[4708]: E0203 07:30:19.117896 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"2626b3a8658c29586814ce0ba514a4248dbe2954330627eeec5804ce5789226f\": container with ID starting with 2626b3a8658c29586814ce0ba514a4248dbe2954330627eeec5804ce5789226f not found: ID does not exist" containerID="2626b3a8658c29586814ce0ba514a4248dbe2954330627eeec5804ce5789226f" Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.117928 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2626b3a8658c29586814ce0ba514a4248dbe2954330627eeec5804ce5789226f"} err="failed to get container status \"2626b3a8658c29586814ce0ba514a4248dbe2954330627eeec5804ce5789226f\": rpc error: code = NotFound desc = could not find container \"2626b3a8658c29586814ce0ba514a4248dbe2954330627eeec5804ce5789226f\": container with ID starting with 2626b3a8658c29586814ce0ba514a4248dbe2954330627eeec5804ce5789226f not found: ID does not exist" Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.117945 4708 scope.go:117] "RemoveContainer" containerID="1fc708ec3c98582da0dc149659d66e3eb94ba8c483012081e2af775d936bae83" Feb 03 07:30:19 crc kubenswrapper[4708]: E0203 07:30:19.119171 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1fc708ec3c98582da0dc149659d66e3eb94ba8c483012081e2af775d936bae83\": container with ID starting with 1fc708ec3c98582da0dc149659d66e3eb94ba8c483012081e2af775d936bae83 not found: ID does not exist" containerID="1fc708ec3c98582da0dc149659d66e3eb94ba8c483012081e2af775d936bae83" Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.119212 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fc708ec3c98582da0dc149659d66e3eb94ba8c483012081e2af775d936bae83"} err="failed to get container status \"1fc708ec3c98582da0dc149659d66e3eb94ba8c483012081e2af775d936bae83\": rpc error: code = NotFound desc = could not find container \"1fc708ec3c98582da0dc149659d66e3eb94ba8c483012081e2af775d936bae83\": container with ID starting with 1fc708ec3c98582da0dc149659d66e3eb94ba8c483012081e2af775d936bae83 not found: ID does not exist" Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.875808 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.878638 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-rzctf" event={"ID":"e7bbff83-97e5-41e1-8445-0c7deba26930","Type":"ContainerStarted","Data":"22a95e27b5e5bb614fa197c59d103cc7466461cd9d655a7bacccb36e5cff9e89"} Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.878827 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-757b4f8459-rzctf" Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.884157 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ironic-conductor-0" event={"ID":"361821ae-c957-4e31-bb9b-6d659aaceec4","Type":"ContainerStarted","Data":"cc30d22f79a31534e0a4473cd0b1a017ac882cdd82c24a4962416b38f4d22177"} Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.884371 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ironic-conductor-0" Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.938822 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-757b4f8459-rzctf" podStartSLOduration=2.938777283 podStartE2EDuration="2.938777283s" podCreationTimestamp="2026-02-03 07:30:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:30:19.89901 +0000 UTC m=+1198.880956817" watchObservedRunningTime="2026-02-03 07:30:19.938777283 +0000 UTC m=+1198.920724090" Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.946858 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.962248 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.990093 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ironic-conductor-0" podStartSLOduration=54.723968096 podStartE2EDuration="1m35.990074512s" podCreationTimestamp="2026-02-03 07:28:44 +0000 UTC" firstStartedPulling="2026-02-03 07:28:48.754260989 +0000 UTC m=+1107.736207796" lastFinishedPulling="2026-02-03 07:29:30.020367385 +0000 UTC m=+1149.002314212" observedRunningTime="2026-02-03 07:30:19.939934802 +0000 UTC m=+1198.921881619" watchObservedRunningTime="2026-02-03 07:30:19.990074512 +0000 UTC m=+1198.972021349" Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.992240 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:30:19 crc kubenswrapper[4708]: E0203 07:30:19.993021 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b13517d-a5f6-4d5a-905d-79c0d0876c74" containerName="proxy-httpd" Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.993095 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b13517d-a5f6-4d5a-905d-79c0d0876c74" containerName="proxy-httpd" Feb 03 07:30:19 crc kubenswrapper[4708]: E0203 07:30:19.993167 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b13517d-a5f6-4d5a-905d-79c0d0876c74" containerName="ceilometer-central-agent" Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.993355 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b13517d-a5f6-4d5a-905d-79c0d0876c74" containerName="ceilometer-central-agent" Feb 03 07:30:19 crc kubenswrapper[4708]: E0203 07:30:19.993437 4708 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="8b13517d-a5f6-4d5a-905d-79c0d0876c74" containerName="sg-core" Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.993494 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b13517d-a5f6-4d5a-905d-79c0d0876c74" containerName="sg-core" Feb 03 07:30:19 crc kubenswrapper[4708]: E0203 07:30:19.993584 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b13517d-a5f6-4d5a-905d-79c0d0876c74" containerName="ceilometer-notification-agent" Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.993642 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b13517d-a5f6-4d5a-905d-79c0d0876c74" containerName="ceilometer-notification-agent" Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.993942 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b13517d-a5f6-4d5a-905d-79c0d0876c74" containerName="proxy-httpd" Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.994019 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b13517d-a5f6-4d5a-905d-79c0d0876c74" containerName="ceilometer-central-agent" Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.994099 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b13517d-a5f6-4d5a-905d-79c0d0876c74" containerName="sg-core" Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.994161 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b13517d-a5f6-4d5a-905d-79c0d0876c74" containerName="ceilometer-notification-agent" Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.996931 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:30:19 crc kubenswrapper[4708]: I0203 07:30:19.999778 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 03 07:30:20 crc kubenswrapper[4708]: I0203 07:30:20.000215 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 03 07:30:20 crc kubenswrapper[4708]: I0203 07:30:20.010335 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:30:20 crc kubenswrapper[4708]: I0203 07:30:20.089277 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tsf8v\" (UniqueName: \"kubernetes.io/projected/19739ef7-2ac6-46b5-84fb-308b16671ecc-kube-api-access-tsf8v\") pod \"ceilometer-0\" (UID: \"19739ef7-2ac6-46b5-84fb-308b16671ecc\") " pod="openstack/ceilometer-0" Feb 03 07:30:20 crc kubenswrapper[4708]: I0203 07:30:20.089330 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19739ef7-2ac6-46b5-84fb-308b16671ecc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"19739ef7-2ac6-46b5-84fb-308b16671ecc\") " pod="openstack/ceilometer-0" Feb 03 07:30:20 crc kubenswrapper[4708]: I0203 07:30:20.089728 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19739ef7-2ac6-46b5-84fb-308b16671ecc-config-data\") pod \"ceilometer-0\" (UID: \"19739ef7-2ac6-46b5-84fb-308b16671ecc\") " pod="openstack/ceilometer-0" Feb 03 07:30:20 crc kubenswrapper[4708]: I0203 07:30:20.089898 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/19739ef7-2ac6-46b5-84fb-308b16671ecc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"19739ef7-2ac6-46b5-84fb-308b16671ecc\") " pod="openstack/ceilometer-0" Feb 03 07:30:20 crc kubenswrapper[4708]: I0203 07:30:20.089941 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/19739ef7-2ac6-46b5-84fb-308b16671ecc-scripts\") pod \"ceilometer-0\" (UID: \"19739ef7-2ac6-46b5-84fb-308b16671ecc\") " pod="openstack/ceilometer-0" Feb 03 07:30:20 crc kubenswrapper[4708]: I0203 07:30:20.089981 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19739ef7-2ac6-46b5-84fb-308b16671ecc-run-httpd\") pod \"ceilometer-0\" (UID: \"19739ef7-2ac6-46b5-84fb-308b16671ecc\") " pod="openstack/ceilometer-0" Feb 03 07:30:20 crc kubenswrapper[4708]: I0203 07:30:20.090008 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19739ef7-2ac6-46b5-84fb-308b16671ecc-log-httpd\") pod \"ceilometer-0\" (UID: \"19739ef7-2ac6-46b5-84fb-308b16671ecc\") " pod="openstack/ceilometer-0" Feb 03 07:30:20 crc kubenswrapper[4708]: I0203 07:30:20.105978 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b13517d-a5f6-4d5a-905d-79c0d0876c74" path="/var/lib/kubelet/pods/8b13517d-a5f6-4d5a-905d-79c0d0876c74/volumes" Feb 03 07:30:20 crc kubenswrapper[4708]: I0203 07:30:20.191307 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19739ef7-2ac6-46b5-84fb-308b16671ecc-config-data\") pod \"ceilometer-0\" (UID: \"19739ef7-2ac6-46b5-84fb-308b16671ecc\") " pod="openstack/ceilometer-0" Feb 03 07:30:20 crc kubenswrapper[4708]: I0203 07:30:20.191365 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/19739ef7-2ac6-46b5-84fb-308b16671ecc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"19739ef7-2ac6-46b5-84fb-308b16671ecc\") " pod="openstack/ceilometer-0" Feb 03 07:30:20 crc kubenswrapper[4708]: I0203 07:30:20.191392 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/19739ef7-2ac6-46b5-84fb-308b16671ecc-scripts\") pod \"ceilometer-0\" (UID: \"19739ef7-2ac6-46b5-84fb-308b16671ecc\") " pod="openstack/ceilometer-0" Feb 03 07:30:20 crc kubenswrapper[4708]: I0203 07:30:20.191424 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19739ef7-2ac6-46b5-84fb-308b16671ecc-run-httpd\") pod \"ceilometer-0\" (UID: \"19739ef7-2ac6-46b5-84fb-308b16671ecc\") " pod="openstack/ceilometer-0" Feb 03 07:30:20 crc kubenswrapper[4708]: I0203 07:30:20.191445 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19739ef7-2ac6-46b5-84fb-308b16671ecc-log-httpd\") pod \"ceilometer-0\" (UID: \"19739ef7-2ac6-46b5-84fb-308b16671ecc\") " pod="openstack/ceilometer-0" Feb 03 07:30:20 crc kubenswrapper[4708]: I0203 07:30:20.191499 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tsf8v\" (UniqueName: \"kubernetes.io/projected/19739ef7-2ac6-46b5-84fb-308b16671ecc-kube-api-access-tsf8v\") pod 
\"ceilometer-0\" (UID: \"19739ef7-2ac6-46b5-84fb-308b16671ecc\") " pod="openstack/ceilometer-0" Feb 03 07:30:20 crc kubenswrapper[4708]: I0203 07:30:20.191545 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19739ef7-2ac6-46b5-84fb-308b16671ecc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"19739ef7-2ac6-46b5-84fb-308b16671ecc\") " pod="openstack/ceilometer-0" Feb 03 07:30:20 crc kubenswrapper[4708]: I0203 07:30:20.192053 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19739ef7-2ac6-46b5-84fb-308b16671ecc-log-httpd\") pod \"ceilometer-0\" (UID: \"19739ef7-2ac6-46b5-84fb-308b16671ecc\") " pod="openstack/ceilometer-0" Feb 03 07:30:20 crc kubenswrapper[4708]: I0203 07:30:20.192130 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19739ef7-2ac6-46b5-84fb-308b16671ecc-run-httpd\") pod \"ceilometer-0\" (UID: \"19739ef7-2ac6-46b5-84fb-308b16671ecc\") " pod="openstack/ceilometer-0" Feb 03 07:30:20 crc kubenswrapper[4708]: I0203 07:30:20.198059 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/19739ef7-2ac6-46b5-84fb-308b16671ecc-scripts\") pod \"ceilometer-0\" (UID: \"19739ef7-2ac6-46b5-84fb-308b16671ecc\") " pod="openstack/ceilometer-0" Feb 03 07:30:20 crc kubenswrapper[4708]: I0203 07:30:20.198236 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/19739ef7-2ac6-46b5-84fb-308b16671ecc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"19739ef7-2ac6-46b5-84fb-308b16671ecc\") " pod="openstack/ceilometer-0" Feb 03 07:30:20 crc kubenswrapper[4708]: I0203 07:30:20.208110 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19739ef7-2ac6-46b5-84fb-308b16671ecc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"19739ef7-2ac6-46b5-84fb-308b16671ecc\") " pod="openstack/ceilometer-0" Feb 03 07:30:20 crc kubenswrapper[4708]: I0203 07:30:20.212295 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tsf8v\" (UniqueName: \"kubernetes.io/projected/19739ef7-2ac6-46b5-84fb-308b16671ecc-kube-api-access-tsf8v\") pod \"ceilometer-0\" (UID: \"19739ef7-2ac6-46b5-84fb-308b16671ecc\") " pod="openstack/ceilometer-0" Feb 03 07:30:20 crc kubenswrapper[4708]: I0203 07:30:20.226160 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19739ef7-2ac6-46b5-84fb-308b16671ecc-config-data\") pod \"ceilometer-0\" (UID: \"19739ef7-2ac6-46b5-84fb-308b16671ecc\") " pod="openstack/ceilometer-0" Feb 03 07:30:20 crc kubenswrapper[4708]: I0203 07:30:20.313402 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:30:20 crc kubenswrapper[4708]: I0203 07:30:20.319274 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ironic-conductor-0" Feb 03 07:30:20 crc kubenswrapper[4708]: I0203 07:30:20.346571 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 03 07:30:20 crc kubenswrapper[4708]: I0203 07:30:20.358005 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 03 07:30:21 crc kubenswrapper[4708]: I0203 07:30:21.546744 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/ironic-conductor-0" podUID="361821ae-c957-4e31-bb9b-6d659aaceec4" containerName="ironic-conductor" probeResult="failure" output=< Feb 03 07:30:21 crc kubenswrapper[4708]: ironic-conductor-0 is offline Feb 03 07:30:21 crc kubenswrapper[4708]: > Feb 03 07:30:22 crc kubenswrapper[4708]: W0203 07:30:22.809894 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19739ef7_2ac6_46b5_84fb_308b16671ecc.slice/crio-ccd55ffec04468346af02e0e15f9904749dcde70ea92a14de2c29ae9433d0142 WatchSource:0}: Error finding container ccd55ffec04468346af02e0e15f9904749dcde70ea92a14de2c29ae9433d0142: Status 404 returned error can't find the container with id ccd55ffec04468346af02e0e15f9904749dcde70ea92a14de2c29ae9433d0142 Feb 03 07:30:22 crc kubenswrapper[4708]: I0203 07:30:22.813786 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:30:22 crc kubenswrapper[4708]: I0203 07:30:22.908534 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19739ef7-2ac6-46b5-84fb-308b16671ecc","Type":"ContainerStarted","Data":"ccd55ffec04468346af02e0e15f9904749dcde70ea92a14de2c29ae9433d0142"} Feb 03 07:30:22 crc kubenswrapper[4708]: I0203 07:30:22.910242 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"72179ea9-93e3-4e20-907f-bcf317cd4f80","Type":"ContainerStarted","Data":"05ba5b153a55cc9afb5a266092ec64c2ba5e7619492672973c37f7232fed0dea"} Feb 03 07:30:22 crc kubenswrapper[4708]: I0203 07:30:22.910293 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"72179ea9-93e3-4e20-907f-bcf317cd4f80","Type":"ContainerStarted","Data":"d7000c44f31f7d3fd85e8c61aaa9b8fb0676c3100fb54c041b6997124ba8499d"} Feb 03 07:30:22 crc kubenswrapper[4708]: I0203 07:30:22.910431 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="72179ea9-93e3-4e20-907f-bcf317cd4f80" containerName="nova-metadata-log" containerID="cri-o://d7000c44f31f7d3fd85e8c61aaa9b8fb0676c3100fb54c041b6997124ba8499d" gracePeriod=30 Feb 03 07:30:22 crc kubenswrapper[4708]: I0203 07:30:22.911012 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="72179ea9-93e3-4e20-907f-bcf317cd4f80" containerName="nova-metadata-metadata" containerID="cri-o://05ba5b153a55cc9afb5a266092ec64c2ba5e7619492672973c37f7232fed0dea" gracePeriod=30 Feb 03 07:30:22 crc kubenswrapper[4708]: I0203 07:30:22.922385 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fcae63fb-2971-4a50-a678-1d3d3a598a4b","Type":"ContainerStarted","Data":"07d589453406b2d27cf303e69a0eb1605f6735a55cc5510050e704e390e5f459"} Feb 03 07:30:22 crc kubenswrapper[4708]: 
I0203 07:30:22.922436 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fcae63fb-2971-4a50-a678-1d3d3a598a4b","Type":"ContainerStarted","Data":"900b21bd9f26efef475f27a500fc6408941c3e91644191e288e620c11a579566"} Feb 03 07:30:22 crc kubenswrapper[4708]: I0203 07:30:22.927153 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"fdc4c642-cd6a-4104-9594-647a3356296a","Type":"ContainerStarted","Data":"d3e3f4f752245ffe166a35e679f85b8e9c0ba5f8fc2490175cf70632d2a70448"} Feb 03 07:30:22 crc kubenswrapper[4708]: I0203 07:30:22.927286 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="fdc4c642-cd6a-4104-9594-647a3356296a" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://d3e3f4f752245ffe166a35e679f85b8e9c0ba5f8fc2490175cf70632d2a70448" gracePeriod=30 Feb 03 07:30:22 crc kubenswrapper[4708]: I0203 07:30:22.931502 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"58a6c72b-c4c2-4f2e-9a56-7a94427a10c8","Type":"ContainerStarted","Data":"7727598660eb74efa92611ece9040b52349c38e62d9a5c1b951af6066d828030"} Feb 03 07:30:22 crc kubenswrapper[4708]: I0203 07:30:22.936880 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.773390739 podStartE2EDuration="6.936862984s" podCreationTimestamp="2026-02-03 07:30:16 +0000 UTC" firstStartedPulling="2026-02-03 07:30:18.189468631 +0000 UTC m=+1197.171415438" lastFinishedPulling="2026-02-03 07:30:22.352940876 +0000 UTC m=+1201.334887683" observedRunningTime="2026-02-03 07:30:22.927469482 +0000 UTC m=+1201.909416309" watchObservedRunningTime="2026-02-03 07:30:22.936862984 +0000 UTC m=+1201.918809791" Feb 03 07:30:22 crc kubenswrapper[4708]: I0203 07:30:22.966965 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.5951112309999997 podStartE2EDuration="6.966945858s" podCreationTimestamp="2026-02-03 07:30:16 +0000 UTC" firstStartedPulling="2026-02-03 07:30:17.972003554 +0000 UTC m=+1196.953950361" lastFinishedPulling="2026-02-03 07:30:22.343838181 +0000 UTC m=+1201.325784988" observedRunningTime="2026-02-03 07:30:22.954038999 +0000 UTC m=+1201.935985806" watchObservedRunningTime="2026-02-03 07:30:22.966945858 +0000 UTC m=+1201.948892665" Feb 03 07:30:22 crc kubenswrapper[4708]: I0203 07:30:22.977495 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.394297286 podStartE2EDuration="6.977480509s" podCreationTimestamp="2026-02-03 07:30:16 +0000 UTC" firstStartedPulling="2026-02-03 07:30:17.760687139 +0000 UTC m=+1196.742633946" lastFinishedPulling="2026-02-03 07:30:22.343870352 +0000 UTC m=+1201.325817169" observedRunningTime="2026-02-03 07:30:22.973596632 +0000 UTC m=+1201.955543439" watchObservedRunningTime="2026-02-03 07:30:22.977480509 +0000 UTC m=+1201.959427316" Feb 03 07:30:23 crc kubenswrapper[4708]: I0203 07:30:23.007630 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.376161237 podStartE2EDuration="7.007609223s" podCreationTimestamp="2026-02-03 07:30:16 +0000 UTC" firstStartedPulling="2026-02-03 07:30:17.712511918 +0000 UTC m=+1196.694458725" lastFinishedPulling="2026-02-03 07:30:22.343959904 +0000 UTC m=+1201.325906711" 
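"Killing container with a grace period" with gracePeriod=30 is the SIGTERM phase of container shutdown: the runtime delivers SIGTERM and escalates to SIGKILL only if the process outlives the grace period. The exit codes in the surrounding PLEG entries follow the usual 128+signal convention: 143 for nova-metadata-log is 128+15 (SIGTERM, a graceful stop), while the earlier 137 for ceilometer-0 was 128+9 (SIGKILL). A small decoder:

    import signal

    def describe_exit(code: int) -> str:
        """Decode a container exit code: values above 128 mean death by signal."""
        if code == 0:
            return "exited cleanly"
        if code > 128:
            try:
                return f"killed by {signal.Signals(code - 128).name}"
            except ValueError:
                return f"killed by signal {code - 128}"
        return f"exited with error code {code}"

    for code in (0, 137, 143):   # values taken from the PLEG entries in this log
        print(code, "->", describe_exit(code))
    # 0   -> exited cleanly      (job containers such as the cell-mapping run)
    # 137 -> killed by SIGKILL   (ceilometer-0, hard stop)
    # 143 -> killed by SIGTERM   (nova-metadata-log, graceful stop)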
observedRunningTime="2026-02-03 07:30:22.998692523 +0000 UTC m=+1201.980639340" watchObservedRunningTime="2026-02-03 07:30:23.007609223 +0000 UTC m=+1201.989556030" Feb 03 07:30:23 crc kubenswrapper[4708]: I0203 07:30:23.820608 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ironic-conductor-0" Feb 03 07:30:23 crc kubenswrapper[4708]: I0203 07:30:23.826589 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ironic-conductor-0" Feb 03 07:30:23 crc kubenswrapper[4708]: I0203 07:30:23.833474 4708 patch_prober.go:28] interesting pod/machine-config-daemon-r94bn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:30:23 crc kubenswrapper[4708]: I0203 07:30:23.833545 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:30:23 crc kubenswrapper[4708]: I0203 07:30:23.956827 4708 generic.go:334] "Generic (PLEG): container finished" podID="72179ea9-93e3-4e20-907f-bcf317cd4f80" containerID="d7000c44f31f7d3fd85e8c61aaa9b8fb0676c3100fb54c041b6997124ba8499d" exitCode=143 Feb 03 07:30:23 crc kubenswrapper[4708]: I0203 07:30:23.956907 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"72179ea9-93e3-4e20-907f-bcf317cd4f80","Type":"ContainerDied","Data":"d7000c44f31f7d3fd85e8c61aaa9b8fb0676c3100fb54c041b6997124ba8499d"} Feb 03 07:30:23 crc kubenswrapper[4708]: I0203 07:30:23.970971 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19739ef7-2ac6-46b5-84fb-308b16671ecc","Type":"ContainerStarted","Data":"a731d76dfd3e1491ecf9d2574ad19628f8b8d200fe02236b5d5216f74dfd7e00"} Feb 03 07:30:24 crc kubenswrapper[4708]: I0203 07:30:24.980846 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19739ef7-2ac6-46b5-84fb-308b16671ecc","Type":"ContainerStarted","Data":"5d0e6f2364c854eb3c38730b61ca6f52a8591bd813bc300466789bf1b07a830d"} Feb 03 07:30:25 crc kubenswrapper[4708]: I0203 07:30:25.996643 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19739ef7-2ac6-46b5-84fb-308b16671ecc","Type":"ContainerStarted","Data":"961b214c19b1a9b102099f7d89b8f43892b409daac635b8d6c905f95706420db"} Feb 03 07:30:26 crc kubenswrapper[4708]: I0203 07:30:26.000161 4708 generic.go:334] "Generic (PLEG): container finished" podID="741b999e-f896-4960-8f9c-e9b4aade9039" containerID="a610cfdfca03aa0f3bcd8b6afc1f403d4b5f731f7deeac468e62aec0f615bfdf" exitCode=0 Feb 03 07:30:26 crc kubenswrapper[4708]: I0203 07:30:26.000246 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-6gqmd" event={"ID":"741b999e-f896-4960-8f9c-e9b4aade9039","Type":"ContainerDied","Data":"a610cfdfca03aa0f3bcd8b6afc1f403d4b5f731f7deeac468e62aec0f615bfdf"} Feb 03 07:30:27 crc kubenswrapper[4708]: I0203 07:30:27.221907 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:30:27 crc kubenswrapper[4708]: I0203 07:30:27.226145 4708 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack/nova-scheduler-0" Feb 03 07:30:27 crc kubenswrapper[4708]: I0203 07:30:27.226206 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Feb 03 07:30:27 crc kubenswrapper[4708]: I0203 07:30:27.260725 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Feb 03 07:30:27 crc kubenswrapper[4708]: I0203 07:30:27.287332 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 03 07:30:27 crc kubenswrapper[4708]: I0203 07:30:27.287767 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 03 07:30:27 crc kubenswrapper[4708]: I0203 07:30:27.365335 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-6gqmd" Feb 03 07:30:27 crc kubenswrapper[4708]: I0203 07:30:27.404118 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 03 07:30:27 crc kubenswrapper[4708]: I0203 07:30:27.404360 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 03 07:30:27 crc kubenswrapper[4708]: I0203 07:30:27.473840 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/741b999e-f896-4960-8f9c-e9b4aade9039-config-data\") pod \"741b999e-f896-4960-8f9c-e9b4aade9039\" (UID: \"741b999e-f896-4960-8f9c-e9b4aade9039\") " Feb 03 07:30:27 crc kubenswrapper[4708]: I0203 07:30:27.473950 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/741b999e-f896-4960-8f9c-e9b4aade9039-scripts\") pod \"741b999e-f896-4960-8f9c-e9b4aade9039\" (UID: \"741b999e-f896-4960-8f9c-e9b4aade9039\") " Feb 03 07:30:27 crc kubenswrapper[4708]: I0203 07:30:27.474068 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/741b999e-f896-4960-8f9c-e9b4aade9039-combined-ca-bundle\") pod \"741b999e-f896-4960-8f9c-e9b4aade9039\" (UID: \"741b999e-f896-4960-8f9c-e9b4aade9039\") " Feb 03 07:30:27 crc kubenswrapper[4708]: I0203 07:30:27.474097 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-djhzb\" (UniqueName: \"kubernetes.io/projected/741b999e-f896-4960-8f9c-e9b4aade9039-kube-api-access-djhzb\") pod \"741b999e-f896-4960-8f9c-e9b4aade9039\" (UID: \"741b999e-f896-4960-8f9c-e9b4aade9039\") " Feb 03 07:30:27 crc kubenswrapper[4708]: I0203 07:30:27.480901 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/741b999e-f896-4960-8f9c-e9b4aade9039-kube-api-access-djhzb" (OuterVolumeSpecName: "kube-api-access-djhzb") pod "741b999e-f896-4960-8f9c-e9b4aade9039" (UID: "741b999e-f896-4960-8f9c-e9b4aade9039"). InnerVolumeSpecName "kube-api-access-djhzb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:30:27 crc kubenswrapper[4708]: I0203 07:30:27.481173 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/741b999e-f896-4960-8f9c-e9b4aade9039-scripts" (OuterVolumeSpecName: "scripts") pod "741b999e-f896-4960-8f9c-e9b4aade9039" (UID: "741b999e-f896-4960-8f9c-e9b4aade9039"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:30:27 crc kubenswrapper[4708]: I0203 07:30:27.504429 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/741b999e-f896-4960-8f9c-e9b4aade9039-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "741b999e-f896-4960-8f9c-e9b4aade9039" (UID: "741b999e-f896-4960-8f9c-e9b4aade9039"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:30:27 crc kubenswrapper[4708]: I0203 07:30:27.510093 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/741b999e-f896-4960-8f9c-e9b4aade9039-config-data" (OuterVolumeSpecName: "config-data") pod "741b999e-f896-4960-8f9c-e9b4aade9039" (UID: "741b999e-f896-4960-8f9c-e9b4aade9039"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:30:27 crc kubenswrapper[4708]: I0203 07:30:27.559639 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-757b4f8459-rzctf" Feb 03 07:30:27 crc kubenswrapper[4708]: I0203 07:30:27.576394 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/741b999e-f896-4960-8f9c-e9b4aade9039-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:27 crc kubenswrapper[4708]: I0203 07:30:27.576428 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-djhzb\" (UniqueName: \"kubernetes.io/projected/741b999e-f896-4960-8f9c-e9b4aade9039-kube-api-access-djhzb\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:27 crc kubenswrapper[4708]: I0203 07:30:27.576440 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/741b999e-f896-4960-8f9c-e9b4aade9039-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:27 crc kubenswrapper[4708]: I0203 07:30:27.576452 4708 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/741b999e-f896-4960-8f9c-e9b4aade9039-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:27 crc kubenswrapper[4708]: I0203 07:30:27.631902 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-b8p62"] Feb 03 07:30:27 crc kubenswrapper[4708]: I0203 07:30:27.632181 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" podUID="d80380a3-aaeb-40d6-a30f-ced06d3885d4" containerName="dnsmasq-dns" containerID="cri-o://f6b54db189c22f26b17e6f3aa38cceaedb4078610ed39e2b0b3a3ca86bbb4050" gracePeriod=10 Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.050909 4708 generic.go:334] "Generic (PLEG): container finished" podID="65d33071-644f-4642-a6b3-ee141d7d6360" containerID="63f582bf7f5d415f591a3d0afd8cae298bcaca5ffc6726f98dedc2945f289bad" exitCode=0 Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.050976 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-5wzvm" event={"ID":"65d33071-644f-4642-a6b3-ee141d7d6360","Type":"ContainerDied","Data":"63f582bf7f5d415f591a3d0afd8cae298bcaca5ffc6726f98dedc2945f289bad"} Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.065643 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"19739ef7-2ac6-46b5-84fb-308b16671ecc","Type":"ContainerStarted","Data":"baef3af92ed5808a9fbdc2896f0b3d5eab08cec0ab5fa1e0ff40c3b105d40deb"} Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.065725 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.074230 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-6gqmd" Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.074237 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-6gqmd" event={"ID":"741b999e-f896-4960-8f9c-e9b4aade9039","Type":"ContainerDied","Data":"4c9b0f80bf62e731a4324184edea3d523ed79e89d59ffa44a9a4e97190f98e99"} Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.074272 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4c9b0f80bf62e731a4324184edea3d523ed79e89d59ffa44a9a4e97190f98e99" Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.074624 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.075982 4708 generic.go:334] "Generic (PLEG): container finished" podID="d80380a3-aaeb-40d6-a30f-ced06d3885d4" containerID="f6b54db189c22f26b17e6f3aa38cceaedb4078610ed39e2b0b3a3ca86bbb4050" exitCode=0 Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.076777 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" event={"ID":"d80380a3-aaeb-40d6-a30f-ced06d3885d4","Type":"ContainerDied","Data":"f6b54db189c22f26b17e6f3aa38cceaedb4078610ed39e2b0b3a3ca86bbb4050"} Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.076821 4708 scope.go:117] "RemoveContainer" containerID="f6b54db189c22f26b17e6f3aa38cceaedb4078610ed39e2b0b3a3ca86bbb4050" Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.085165 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d80380a3-aaeb-40d6-a30f-ced06d3885d4-ovsdbserver-sb\") pod \"d80380a3-aaeb-40d6-a30f-ced06d3885d4\" (UID: \"d80380a3-aaeb-40d6-a30f-ced06d3885d4\") " Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.085250 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d80380a3-aaeb-40d6-a30f-ced06d3885d4-ovsdbserver-nb\") pod \"d80380a3-aaeb-40d6-a30f-ced06d3885d4\" (UID: \"d80380a3-aaeb-40d6-a30f-ced06d3885d4\") " Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.085435 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2zvqg\" (UniqueName: \"kubernetes.io/projected/d80380a3-aaeb-40d6-a30f-ced06d3885d4-kube-api-access-2zvqg\") pod \"d80380a3-aaeb-40d6-a30f-ced06d3885d4\" (UID: \"d80380a3-aaeb-40d6-a30f-ced06d3885d4\") " Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.085503 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d80380a3-aaeb-40d6-a30f-ced06d3885d4-config\") pod \"d80380a3-aaeb-40d6-a30f-ced06d3885d4\" (UID: \"d80380a3-aaeb-40d6-a30f-ced06d3885d4\") " Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.085541 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d80380a3-aaeb-40d6-a30f-ced06d3885d4-dns-swift-storage-0\") pod \"d80380a3-aaeb-40d6-a30f-ced06d3885d4\" (UID: \"d80380a3-aaeb-40d6-a30f-ced06d3885d4\") " Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.085573 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d80380a3-aaeb-40d6-a30f-ced06d3885d4-dns-svc\") pod \"d80380a3-aaeb-40d6-a30f-ced06d3885d4\" (UID: \"d80380a3-aaeb-40d6-a30f-ced06d3885d4\") " Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.102822 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=4.7623128139999995 podStartE2EDuration="9.102807646s" podCreationTimestamp="2026-02-03 07:30:19 +0000 UTC" firstStartedPulling="2026-02-03 07:30:22.812018847 +0000 UTC m=+1201.793965654" lastFinishedPulling="2026-02-03 07:30:27.152513679 +0000 UTC m=+1206.134460486" observedRunningTime="2026-02-03 07:30:28.084103783 +0000 UTC m=+1207.066050590" watchObservedRunningTime="2026-02-03 07:30:28.102807646 +0000 UTC m=+1207.084754453" Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.116918 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d80380a3-aaeb-40d6-a30f-ced06d3885d4-kube-api-access-2zvqg" (OuterVolumeSpecName: "kube-api-access-2zvqg") pod "d80380a3-aaeb-40d6-a30f-ced06d3885d4" (UID: "d80380a3-aaeb-40d6-a30f-ced06d3885d4"). InnerVolumeSpecName "kube-api-access-2zvqg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.122697 4708 scope.go:117] "RemoveContainer" containerID="338b88f454f739a8b05c9ab18020e42ffcabd48f9599a383feba57c88d6f8dd0" Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.134949 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.159514 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d80380a3-aaeb-40d6-a30f-ced06d3885d4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d80380a3-aaeb-40d6-a30f-ced06d3885d4" (UID: "d80380a3-aaeb-40d6-a30f-ced06d3885d4"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.188054 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2zvqg\" (UniqueName: \"kubernetes.io/projected/d80380a3-aaeb-40d6-a30f-ced06d3885d4-kube-api-access-2zvqg\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.188085 4708 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d80380a3-aaeb-40d6-a30f-ced06d3885d4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.207358 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d80380a3-aaeb-40d6-a30f-ced06d3885d4-config" (OuterVolumeSpecName: "config") pod "d80380a3-aaeb-40d6-a30f-ced06d3885d4" (UID: "d80380a3-aaeb-40d6-a30f-ced06d3885d4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.244405 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d80380a3-aaeb-40d6-a30f-ced06d3885d4-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d80380a3-aaeb-40d6-a30f-ced06d3885d4" (UID: "d80380a3-aaeb-40d6-a30f-ced06d3885d4"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.247117 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.258889 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d80380a3-aaeb-40d6-a30f-ced06d3885d4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d80380a3-aaeb-40d6-a30f-ced06d3885d4" (UID: "d80380a3-aaeb-40d6-a30f-ced06d3885d4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.265417 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d80380a3-aaeb-40d6-a30f-ced06d3885d4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d80380a3-aaeb-40d6-a30f-ced06d3885d4" (UID: "d80380a3-aaeb-40d6-a30f-ced06d3885d4"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.271784 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.290048 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d80380a3-aaeb-40d6-a30f-ced06d3885d4-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.290081 4708 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d80380a3-aaeb-40d6-a30f-ced06d3885d4-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.290093 4708 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d80380a3-aaeb-40d6-a30f-ced06d3885d4-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.290103 4708 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d80380a3-aaeb-40d6-a30f-ced06d3885d4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.371027 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="fcae63fb-2971-4a50-a678-1d3d3a598a4b" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.196:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 03 07:30:28 crc kubenswrapper[4708]: I0203 07:30:28.371074 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="fcae63fb-2971-4a50-a678-1d3d3a598a4b" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.196:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 03 07:30:29 crc kubenswrapper[4708]: I0203 07:30:29.086533 4708 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" event={"ID":"d80380a3-aaeb-40d6-a30f-ced06d3885d4","Type":"ContainerDied","Data":"63a136526da9ddb726feded4fe2f9e113debae7f3f9005736512bf2086ae501b"} Feb 03 07:30:29 crc kubenswrapper[4708]: I0203 07:30:29.086539 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" Feb 03 07:30:29 crc kubenswrapper[4708]: I0203 07:30:29.087200 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="fcae63fb-2971-4a50-a678-1d3d3a598a4b" containerName="nova-api-log" containerID="cri-o://900b21bd9f26efef475f27a500fc6408941c3e91644191e288e620c11a579566" gracePeriod=30 Feb 03 07:30:29 crc kubenswrapper[4708]: I0203 07:30:29.087759 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="fcae63fb-2971-4a50-a678-1d3d3a598a4b" containerName="nova-api-api" containerID="cri-o://07d589453406b2d27cf303e69a0eb1605f6735a55cc5510050e704e390e5f459" gracePeriod=30 Feb 03 07:30:29 crc kubenswrapper[4708]: I0203 07:30:29.140119 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-b8p62"] Feb 03 07:30:29 crc kubenswrapper[4708]: I0203 07:30:29.147353 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-b8p62"] Feb 03 07:30:29 crc kubenswrapper[4708]: I0203 07:30:29.540423 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-5wzvm" Feb 03 07:30:29 crc kubenswrapper[4708]: I0203 07:30:29.715269 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65d33071-644f-4642-a6b3-ee141d7d6360-config-data\") pod \"65d33071-644f-4642-a6b3-ee141d7d6360\" (UID: \"65d33071-644f-4642-a6b3-ee141d7d6360\") " Feb 03 07:30:29 crc kubenswrapper[4708]: I0203 07:30:29.715715 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-twtcz\" (UniqueName: \"kubernetes.io/projected/65d33071-644f-4642-a6b3-ee141d7d6360-kube-api-access-twtcz\") pod \"65d33071-644f-4642-a6b3-ee141d7d6360\" (UID: \"65d33071-644f-4642-a6b3-ee141d7d6360\") " Feb 03 07:30:29 crc kubenswrapper[4708]: I0203 07:30:29.715788 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65d33071-644f-4642-a6b3-ee141d7d6360-combined-ca-bundle\") pod \"65d33071-644f-4642-a6b3-ee141d7d6360\" (UID: \"65d33071-644f-4642-a6b3-ee141d7d6360\") " Feb 03 07:30:29 crc kubenswrapper[4708]: I0203 07:30:29.715853 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65d33071-644f-4642-a6b3-ee141d7d6360-scripts\") pod \"65d33071-644f-4642-a6b3-ee141d7d6360\" (UID: \"65d33071-644f-4642-a6b3-ee141d7d6360\") " Feb 03 07:30:29 crc kubenswrapper[4708]: I0203 07:30:29.722560 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65d33071-644f-4642-a6b3-ee141d7d6360-scripts" (OuterVolumeSpecName: "scripts") pod "65d33071-644f-4642-a6b3-ee141d7d6360" (UID: "65d33071-644f-4642-a6b3-ee141d7d6360"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:30:29 crc kubenswrapper[4708]: I0203 07:30:29.726008 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65d33071-644f-4642-a6b3-ee141d7d6360-kube-api-access-twtcz" (OuterVolumeSpecName: "kube-api-access-twtcz") pod "65d33071-644f-4642-a6b3-ee141d7d6360" (UID: "65d33071-644f-4642-a6b3-ee141d7d6360"). InnerVolumeSpecName "kube-api-access-twtcz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:30:29 crc kubenswrapper[4708]: I0203 07:30:29.750417 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65d33071-644f-4642-a6b3-ee141d7d6360-config-data" (OuterVolumeSpecName: "config-data") pod "65d33071-644f-4642-a6b3-ee141d7d6360" (UID: "65d33071-644f-4642-a6b3-ee141d7d6360"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:30:29 crc kubenswrapper[4708]: I0203 07:30:29.754102 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65d33071-644f-4642-a6b3-ee141d7d6360-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "65d33071-644f-4642-a6b3-ee141d7d6360" (UID: "65d33071-644f-4642-a6b3-ee141d7d6360"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:30:29 crc kubenswrapper[4708]: I0203 07:30:29.817941 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-twtcz\" (UniqueName: \"kubernetes.io/projected/65d33071-644f-4642-a6b3-ee141d7d6360-kube-api-access-twtcz\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:29 crc kubenswrapper[4708]: I0203 07:30:29.818148 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65d33071-644f-4642-a6b3-ee141d7d6360-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:29 crc kubenswrapper[4708]: I0203 07:30:29.818167 4708 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65d33071-644f-4642-a6b3-ee141d7d6360-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:29 crc kubenswrapper[4708]: I0203 07:30:29.818180 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65d33071-644f-4642-a6b3-ee141d7d6360-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:30 crc kubenswrapper[4708]: I0203 07:30:30.098311 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-5wzvm" Feb 03 07:30:30 crc kubenswrapper[4708]: I0203 07:30:30.101161 4708 generic.go:334] "Generic (PLEG): container finished" podID="fcae63fb-2971-4a50-a678-1d3d3a598a4b" containerID="900b21bd9f26efef475f27a500fc6408941c3e91644191e288e620c11a579566" exitCode=143 Feb 03 07:30:30 crc kubenswrapper[4708]: I0203 07:30:30.101372 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="58a6c72b-c4c2-4f2e-9a56-7a94427a10c8" containerName="nova-scheduler-scheduler" containerID="cri-o://7727598660eb74efa92611ece9040b52349c38e62d9a5c1b951af6066d828030" gracePeriod=30 Feb 03 07:30:30 crc kubenswrapper[4708]: I0203 07:30:30.108757 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d80380a3-aaeb-40d6-a30f-ced06d3885d4" path="/var/lib/kubelet/pods/d80380a3-aaeb-40d6-a30f-ced06d3885d4/volumes" Feb 03 07:30:30 crc kubenswrapper[4708]: I0203 07:30:30.109579 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-5wzvm" event={"ID":"65d33071-644f-4642-a6b3-ee141d7d6360","Type":"ContainerDied","Data":"c0e66d2ad53ef995036101afee62d782e661757d68f167c29f735496382dc838"} Feb 03 07:30:30 crc kubenswrapper[4708]: I0203 07:30:30.109609 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c0e66d2ad53ef995036101afee62d782e661757d68f167c29f735496382dc838" Feb 03 07:30:30 crc kubenswrapper[4708]: I0203 07:30:30.109620 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fcae63fb-2971-4a50-a678-1d3d3a598a4b","Type":"ContainerDied","Data":"900b21bd9f26efef475f27a500fc6408941c3e91644191e288e620c11a579566"} Feb 03 07:30:30 crc kubenswrapper[4708]: I0203 07:30:30.156471 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 03 07:30:30 crc kubenswrapper[4708]: E0203 07:30:30.156842 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d80380a3-aaeb-40d6-a30f-ced06d3885d4" containerName="init" Feb 03 07:30:30 crc kubenswrapper[4708]: I0203 07:30:30.156859 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="d80380a3-aaeb-40d6-a30f-ced06d3885d4" containerName="init" Feb 03 07:30:30 crc kubenswrapper[4708]: E0203 07:30:30.156880 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="741b999e-f896-4960-8f9c-e9b4aade9039" containerName="nova-manage" Feb 03 07:30:30 crc kubenswrapper[4708]: I0203 07:30:30.156886 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="741b999e-f896-4960-8f9c-e9b4aade9039" containerName="nova-manage" Feb 03 07:30:30 crc kubenswrapper[4708]: E0203 07:30:30.156899 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d80380a3-aaeb-40d6-a30f-ced06d3885d4" containerName="dnsmasq-dns" Feb 03 07:30:30 crc kubenswrapper[4708]: I0203 07:30:30.156906 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="d80380a3-aaeb-40d6-a30f-ced06d3885d4" containerName="dnsmasq-dns" Feb 03 07:30:30 crc kubenswrapper[4708]: E0203 07:30:30.156923 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65d33071-644f-4642-a6b3-ee141d7d6360" containerName="nova-cell1-conductor-db-sync" Feb 03 07:30:30 crc kubenswrapper[4708]: I0203 07:30:30.156928 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="65d33071-644f-4642-a6b3-ee141d7d6360" containerName="nova-cell1-conductor-db-sync" Feb 03 07:30:30 crc kubenswrapper[4708]: I0203 
07:30:30.157087 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="741b999e-f896-4960-8f9c-e9b4aade9039" containerName="nova-manage" Feb 03 07:30:30 crc kubenswrapper[4708]: I0203 07:30:30.157096 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="d80380a3-aaeb-40d6-a30f-ced06d3885d4" containerName="dnsmasq-dns" Feb 03 07:30:30 crc kubenswrapper[4708]: I0203 07:30:30.157115 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="65d33071-644f-4642-a6b3-ee141d7d6360" containerName="nova-cell1-conductor-db-sync" Feb 03 07:30:30 crc kubenswrapper[4708]: I0203 07:30:30.157677 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Feb 03 07:30:30 crc kubenswrapper[4708]: I0203 07:30:30.161448 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Feb 03 07:30:30 crc kubenswrapper[4708]: I0203 07:30:30.183563 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 03 07:30:30 crc kubenswrapper[4708]: I0203 07:30:30.225247 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/209b7389-309a-47b9-bc02-7f7567848b8f-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"209b7389-309a-47b9-bc02-7f7567848b8f\") " pod="openstack/nova-cell1-conductor-0" Feb 03 07:30:30 crc kubenswrapper[4708]: I0203 07:30:30.225491 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/209b7389-309a-47b9-bc02-7f7567848b8f-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"209b7389-309a-47b9-bc02-7f7567848b8f\") " pod="openstack/nova-cell1-conductor-0" Feb 03 07:30:30 crc kubenswrapper[4708]: I0203 07:30:30.225563 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4wn8\" (UniqueName: \"kubernetes.io/projected/209b7389-309a-47b9-bc02-7f7567848b8f-kube-api-access-w4wn8\") pod \"nova-cell1-conductor-0\" (UID: \"209b7389-309a-47b9-bc02-7f7567848b8f\") " pod="openstack/nova-cell1-conductor-0" Feb 03 07:30:30 crc kubenswrapper[4708]: I0203 07:30:30.326727 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/209b7389-309a-47b9-bc02-7f7567848b8f-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"209b7389-309a-47b9-bc02-7f7567848b8f\") " pod="openstack/nova-cell1-conductor-0" Feb 03 07:30:30 crc kubenswrapper[4708]: I0203 07:30:30.326802 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4wn8\" (UniqueName: \"kubernetes.io/projected/209b7389-309a-47b9-bc02-7f7567848b8f-kube-api-access-w4wn8\") pod \"nova-cell1-conductor-0\" (UID: \"209b7389-309a-47b9-bc02-7f7567848b8f\") " pod="openstack/nova-cell1-conductor-0" Feb 03 07:30:30 crc kubenswrapper[4708]: I0203 07:30:30.326852 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/209b7389-309a-47b9-bc02-7f7567848b8f-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"209b7389-309a-47b9-bc02-7f7567848b8f\") " pod="openstack/nova-cell1-conductor-0" Feb 03 07:30:30 crc kubenswrapper[4708]: I0203 07:30:30.331674 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/209b7389-309a-47b9-bc02-7f7567848b8f-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"209b7389-309a-47b9-bc02-7f7567848b8f\") " pod="openstack/nova-cell1-conductor-0" Feb 03 07:30:30 crc kubenswrapper[4708]: I0203 07:30:30.339507 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/209b7389-309a-47b9-bc02-7f7567848b8f-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"209b7389-309a-47b9-bc02-7f7567848b8f\") " pod="openstack/nova-cell1-conductor-0" Feb 03 07:30:30 crc kubenswrapper[4708]: I0203 07:30:30.343584 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4wn8\" (UniqueName: \"kubernetes.io/projected/209b7389-309a-47b9-bc02-7f7567848b8f-kube-api-access-w4wn8\") pod \"nova-cell1-conductor-0\" (UID: \"209b7389-309a-47b9-bc02-7f7567848b8f\") " pod="openstack/nova-cell1-conductor-0" Feb 03 07:30:30 crc kubenswrapper[4708]: I0203 07:30:30.484540 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Feb 03 07:30:30 crc kubenswrapper[4708]: I0203 07:30:30.999308 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 03 07:30:31 crc kubenswrapper[4708]: I0203 07:30:31.113087 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"209b7389-309a-47b9-bc02-7f7567848b8f","Type":"ContainerStarted","Data":"3b12b931138dd52a3fa160da175af6f523ea6586e8462fa83dec05cd8b609ced"} Feb 03 07:30:32 crc kubenswrapper[4708]: I0203 07:30:32.136859 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"209b7389-309a-47b9-bc02-7f7567848b8f","Type":"ContainerStarted","Data":"714bcf285b13b340f648d0c46bd54af2259ed1366e3c73d525736282f589a580"} Feb 03 07:30:32 crc kubenswrapper[4708]: I0203 07:30:32.137462 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Feb 03 07:30:32 crc kubenswrapper[4708]: I0203 07:30:32.157894 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.15786589 podStartE2EDuration="2.15786589s" podCreationTimestamp="2026-02-03 07:30:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:30:32.154276042 +0000 UTC m=+1211.136222859" watchObservedRunningTime="2026-02-03 07:30:32.15786589 +0000 UTC m=+1211.139812707" Feb 03 07:30:32 crc kubenswrapper[4708]: E0203 07:30:32.227742 4708 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7727598660eb74efa92611ece9040b52349c38e62d9a5c1b951af6066d828030" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 03 07:30:32 crc kubenswrapper[4708]: E0203 07:30:32.229324 4708 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7727598660eb74efa92611ece9040b52349c38e62d9a5c1b951af6066d828030" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 03 07:30:32 crc kubenswrapper[4708]: E0203 07:30:32.230914 4708 log.go:32] "ExecSync cmd from 
Feb 03 07:30:32 crc kubenswrapper[4708]: E0203 07:30:32.230957 4708 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="58a6c72b-c4c2-4f2e-9a56-7a94427a10c8" containerName="nova-scheduler-scheduler"
Feb 03 07:30:32 crc kubenswrapper[4708]: I0203 07:30:32.858843 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5c9776ccc5-b8p62" podUID="d80380a3-aaeb-40d6-a30f-ced06d3885d4" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.164:5353: i/o timeout"
Feb 03 07:30:33 crc kubenswrapper[4708]: I0203 07:30:33.969515 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.137061 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcae63fb-2971-4a50-a678-1d3d3a598a4b-config-data\") pod \"fcae63fb-2971-4a50-a678-1d3d3a598a4b\" (UID: \"fcae63fb-2971-4a50-a678-1d3d3a598a4b\") "
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.137113 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ncxx\" (UniqueName: \"kubernetes.io/projected/fcae63fb-2971-4a50-a678-1d3d3a598a4b-kube-api-access-6ncxx\") pod \"fcae63fb-2971-4a50-a678-1d3d3a598a4b\" (UID: \"fcae63fb-2971-4a50-a678-1d3d3a598a4b\") "
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.137165 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fcae63fb-2971-4a50-a678-1d3d3a598a4b-logs\") pod \"fcae63fb-2971-4a50-a678-1d3d3a598a4b\" (UID: \"fcae63fb-2971-4a50-a678-1d3d3a598a4b\") "
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.137258 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcae63fb-2971-4a50-a678-1d3d3a598a4b-combined-ca-bundle\") pod \"fcae63fb-2971-4a50-a678-1d3d3a598a4b\" (UID: \"fcae63fb-2971-4a50-a678-1d3d3a598a4b\") "
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.138898 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fcae63fb-2971-4a50-a678-1d3d3a598a4b-logs" (OuterVolumeSpecName: "logs") pod "fcae63fb-2971-4a50-a678-1d3d3a598a4b" (UID: "fcae63fb-2971-4a50-a678-1d3d3a598a4b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.150969 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcae63fb-2971-4a50-a678-1d3d3a598a4b-kube-api-access-6ncxx" (OuterVolumeSpecName: "kube-api-access-6ncxx") pod "fcae63fb-2971-4a50-a678-1d3d3a598a4b" (UID: "fcae63fb-2971-4a50-a678-1d3d3a598a4b"). InnerVolumeSpecName "kube-api-access-6ncxx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.169296 4708 generic.go:334] "Generic (PLEG): container finished" podID="fcae63fb-2971-4a50-a678-1d3d3a598a4b" containerID="07d589453406b2d27cf303e69a0eb1605f6735a55cc5510050e704e390e5f459" exitCode=0
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.169357 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.169345 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fcae63fb-2971-4a50-a678-1d3d3a598a4b","Type":"ContainerDied","Data":"07d589453406b2d27cf303e69a0eb1605f6735a55cc5510050e704e390e5f459"}
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.169538 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fcae63fb-2971-4a50-a678-1d3d3a598a4b","Type":"ContainerDied","Data":"c35d53302845791c8cae82f4227e4c09df44121cb733066280dc0b1d706cfa26"}
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.169570 4708 scope.go:117] "RemoveContainer" containerID="07d589453406b2d27cf303e69a0eb1605f6735a55cc5510050e704e390e5f459"
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.177706 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcae63fb-2971-4a50-a678-1d3d3a598a4b-config-data" (OuterVolumeSpecName: "config-data") pod "fcae63fb-2971-4a50-a678-1d3d3a598a4b" (UID: "fcae63fb-2971-4a50-a678-1d3d3a598a4b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.179945 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcae63fb-2971-4a50-a678-1d3d3a598a4b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fcae63fb-2971-4a50-a678-1d3d3a598a4b" (UID: "fcae63fb-2971-4a50-a678-1d3d3a598a4b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.239193 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcae63fb-2971-4a50-a678-1d3d3a598a4b-config-data\") on node \"crc\" DevicePath \"\""
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.239245 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ncxx\" (UniqueName: \"kubernetes.io/projected/fcae63fb-2971-4a50-a678-1d3d3a598a4b-kube-api-access-6ncxx\") on node \"crc\" DevicePath \"\""
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.239259 4708 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fcae63fb-2971-4a50-a678-1d3d3a598a4b-logs\") on node \"crc\" DevicePath \"\""
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.239270 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcae63fb-2971-4a50-a678-1d3d3a598a4b-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.266769 4708 scope.go:117] "RemoveContainer" containerID="900b21bd9f26efef475f27a500fc6408941c3e91644191e288e620c11a579566"
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.285474 4708 scope.go:117] "RemoveContainer" containerID="07d589453406b2d27cf303e69a0eb1605f6735a55cc5510050e704e390e5f459"
Feb 03 07:30:34 crc kubenswrapper[4708]: E0203 07:30:34.286402 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07d589453406b2d27cf303e69a0eb1605f6735a55cc5510050e704e390e5f459\": container with ID starting with 07d589453406b2d27cf303e69a0eb1605f6735a55cc5510050e704e390e5f459 not found: ID does not exist" containerID="07d589453406b2d27cf303e69a0eb1605f6735a55cc5510050e704e390e5f459"
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.286446 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07d589453406b2d27cf303e69a0eb1605f6735a55cc5510050e704e390e5f459"} err="failed to get container status \"07d589453406b2d27cf303e69a0eb1605f6735a55cc5510050e704e390e5f459\": rpc error: code = NotFound desc = could not find container \"07d589453406b2d27cf303e69a0eb1605f6735a55cc5510050e704e390e5f459\": container with ID starting with 07d589453406b2d27cf303e69a0eb1605f6735a55cc5510050e704e390e5f459 not found: ID does not exist"
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.286472 4708 scope.go:117] "RemoveContainer" containerID="900b21bd9f26efef475f27a500fc6408941c3e91644191e288e620c11a579566"
Feb 03 07:30:34 crc kubenswrapper[4708]: E0203 07:30:34.286716 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"900b21bd9f26efef475f27a500fc6408941c3e91644191e288e620c11a579566\": container with ID starting with 900b21bd9f26efef475f27a500fc6408941c3e91644191e288e620c11a579566 not found: ID does not exist" containerID="900b21bd9f26efef475f27a500fc6408941c3e91644191e288e620c11a579566"
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.286747 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"900b21bd9f26efef475f27a500fc6408941c3e91644191e288e620c11a579566"} err="failed to get container status \"900b21bd9f26efef475f27a500fc6408941c3e91644191e288e620c11a579566\": rpc error: code = NotFound desc = could not find container \"900b21bd9f26efef475f27a500fc6408941c3e91644191e288e620c11a579566\": container with ID starting with 900b21bd9f26efef475f27a500fc6408941c3e91644191e288e620c11a579566 not found: ID does not exist"
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.503868 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.522252 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.537331 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Feb 03 07:30:34 crc kubenswrapper[4708]: E0203 07:30:34.537717 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcae63fb-2971-4a50-a678-1d3d3a598a4b" containerName="nova-api-log"
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.537741 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcae63fb-2971-4a50-a678-1d3d3a598a4b" containerName="nova-api-log"
Feb 03 07:30:34 crc kubenswrapper[4708]: E0203 07:30:34.537770 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcae63fb-2971-4a50-a678-1d3d3a598a4b" containerName="nova-api-api"
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.537777 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcae63fb-2971-4a50-a678-1d3d3a598a4b" containerName="nova-api-api"
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.537977 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcae63fb-2971-4a50-a678-1d3d3a598a4b" containerName="nova-api-api"
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.538002 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcae63fb-2971-4a50-a678-1d3d3a598a4b" containerName="nova-api-log"
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.539393 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.546266 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.556543 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.565156 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a461a3e7-34d0-428b-b511-adbe5443a526-logs\") pod \"nova-api-0\" (UID: \"a461a3e7-34d0-428b-b511-adbe5443a526\") " pod="openstack/nova-api-0"
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.565248 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a461a3e7-34d0-428b-b511-adbe5443a526-config-data\") pod \"nova-api-0\" (UID: \"a461a3e7-34d0-428b-b511-adbe5443a526\") " pod="openstack/nova-api-0"
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.565343 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a461a3e7-34d0-428b-b511-adbe5443a526-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a461a3e7-34d0-428b-b511-adbe5443a526\") " pod="openstack/nova-api-0"
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.565396 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skcbl\" (UniqueName: \"kubernetes.io/projected/a461a3e7-34d0-428b-b511-adbe5443a526-kube-api-access-skcbl\") pod \"nova-api-0\" (UID: \"a461a3e7-34d0-428b-b511-adbe5443a526\") " pod="openstack/nova-api-0"
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.666878 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a461a3e7-34d0-428b-b511-adbe5443a526-logs\") pod \"nova-api-0\" (UID: \"a461a3e7-34d0-428b-b511-adbe5443a526\") " pod="openstack/nova-api-0"
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.666978 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a461a3e7-34d0-428b-b511-adbe5443a526-config-data\") pod \"nova-api-0\" (UID: \"a461a3e7-34d0-428b-b511-adbe5443a526\") " pod="openstack/nova-api-0"
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.667030 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a461a3e7-34d0-428b-b511-adbe5443a526-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a461a3e7-34d0-428b-b511-adbe5443a526\") " pod="openstack/nova-api-0"
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.667079 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skcbl\" (UniqueName: \"kubernetes.io/projected/a461a3e7-34d0-428b-b511-adbe5443a526-kube-api-access-skcbl\") pod \"nova-api-0\" (UID: \"a461a3e7-34d0-428b-b511-adbe5443a526\") " pod="openstack/nova-api-0"
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.667676 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a461a3e7-34d0-428b-b511-adbe5443a526-logs\") pod \"nova-api-0\" (UID: \"a461a3e7-34d0-428b-b511-adbe5443a526\") " pod="openstack/nova-api-0"
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.671575 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a461a3e7-34d0-428b-b511-adbe5443a526-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a461a3e7-34d0-428b-b511-adbe5443a526\") " pod="openstack/nova-api-0"
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.671673 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a461a3e7-34d0-428b-b511-adbe5443a526-config-data\") pod \"nova-api-0\" (UID: \"a461a3e7-34d0-428b-b511-adbe5443a526\") " pod="openstack/nova-api-0"
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.685626 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skcbl\" (UniqueName: \"kubernetes.io/projected/a461a3e7-34d0-428b-b511-adbe5443a526-kube-api-access-skcbl\") pod \"nova-api-0\" (UID: \"a461a3e7-34d0-428b-b511-adbe5443a526\") " pod="openstack/nova-api-0"
Feb 03 07:30:34 crc kubenswrapper[4708]: I0203 07:30:34.894779 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Feb 03 07:30:35 crc kubenswrapper[4708]: I0203 07:30:35.181716 4708 generic.go:334] "Generic (PLEG): container finished" podID="58a6c72b-c4c2-4f2e-9a56-7a94427a10c8" containerID="7727598660eb74efa92611ece9040b52349c38e62d9a5c1b951af6066d828030" exitCode=0
Feb 03 07:30:35 crc kubenswrapper[4708]: I0203 07:30:35.181994 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"58a6c72b-c4c2-4f2e-9a56-7a94427a10c8","Type":"ContainerDied","Data":"7727598660eb74efa92611ece9040b52349c38e62d9a5c1b951af6066d828030"}
Feb 03 07:30:35 crc kubenswrapper[4708]: I0203 07:30:35.189435 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Feb 03 07:30:35 crc kubenswrapper[4708]: I0203 07:30:35.378085 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mfcjr\" (UniqueName: \"kubernetes.io/projected/58a6c72b-c4c2-4f2e-9a56-7a94427a10c8-kube-api-access-mfcjr\") pod \"58a6c72b-c4c2-4f2e-9a56-7a94427a10c8\" (UID: \"58a6c72b-c4c2-4f2e-9a56-7a94427a10c8\") "
Feb 03 07:30:35 crc kubenswrapper[4708]: I0203 07:30:35.378731 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58a6c72b-c4c2-4f2e-9a56-7a94427a10c8-config-data\") pod \"58a6c72b-c4c2-4f2e-9a56-7a94427a10c8\" (UID: \"58a6c72b-c4c2-4f2e-9a56-7a94427a10c8\") "
Feb 03 07:30:35 crc kubenswrapper[4708]: I0203 07:30:35.378890 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58a6c72b-c4c2-4f2e-9a56-7a94427a10c8-combined-ca-bundle\") pod \"58a6c72b-c4c2-4f2e-9a56-7a94427a10c8\" (UID: \"58a6c72b-c4c2-4f2e-9a56-7a94427a10c8\") "
Feb 03 07:30:35 crc kubenswrapper[4708]: I0203 07:30:35.382762 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58a6c72b-c4c2-4f2e-9a56-7a94427a10c8-kube-api-access-mfcjr" (OuterVolumeSpecName: "kube-api-access-mfcjr") pod "58a6c72b-c4c2-4f2e-9a56-7a94427a10c8" (UID: "58a6c72b-c4c2-4f2e-9a56-7a94427a10c8"). InnerVolumeSpecName "kube-api-access-mfcjr". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:30:35 crc kubenswrapper[4708]: I0203 07:30:35.409616 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58a6c72b-c4c2-4f2e-9a56-7a94427a10c8-config-data" (OuterVolumeSpecName: "config-data") pod "58a6c72b-c4c2-4f2e-9a56-7a94427a10c8" (UID: "58a6c72b-c4c2-4f2e-9a56-7a94427a10c8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:30:35 crc kubenswrapper[4708]: I0203 07:30:35.430885 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58a6c72b-c4c2-4f2e-9a56-7a94427a10c8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "58a6c72b-c4c2-4f2e-9a56-7a94427a10c8" (UID: "58a6c72b-c4c2-4f2e-9a56-7a94427a10c8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:30:35 crc kubenswrapper[4708]: I0203 07:30:35.436400 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 03 07:30:35 crc kubenswrapper[4708]: I0203 07:30:35.481129 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58a6c72b-c4c2-4f2e-9a56-7a94427a10c8-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:35 crc kubenswrapper[4708]: I0203 07:30:35.481161 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58a6c72b-c4c2-4f2e-9a56-7a94427a10c8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:35 crc kubenswrapper[4708]: I0203 07:30:35.481171 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mfcjr\" (UniqueName: \"kubernetes.io/projected/58a6c72b-c4c2-4f2e-9a56-7a94427a10c8-kube-api-access-mfcjr\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:36 crc kubenswrapper[4708]: I0203 07:30:36.104354 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fcae63fb-2971-4a50-a678-1d3d3a598a4b" path="/var/lib/kubelet/pods/fcae63fb-2971-4a50-a678-1d3d3a598a4b/volumes" Feb 03 07:30:36 crc kubenswrapper[4708]: I0203 07:30:36.190596 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a461a3e7-34d0-428b-b511-adbe5443a526","Type":"ContainerStarted","Data":"71054a3525da66cf3ca4c88a304504066d25873651819c8ddc4da025b4beec73"} Feb 03 07:30:36 crc kubenswrapper[4708]: I0203 07:30:36.190650 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a461a3e7-34d0-428b-b511-adbe5443a526","Type":"ContainerStarted","Data":"ed566b066f27d80ad31128568961bd635c149283ae4563371bf1a166b2eb6054"} Feb 03 07:30:36 crc kubenswrapper[4708]: I0203 07:30:36.190660 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a461a3e7-34d0-428b-b511-adbe5443a526","Type":"ContainerStarted","Data":"254fa7296522ef3c40b0631321202b5c28d7ed6e7fc1b9b3a7b0215b8d788b1b"} Feb 03 07:30:36 crc kubenswrapper[4708]: I0203 07:30:36.194616 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"58a6c72b-c4c2-4f2e-9a56-7a94427a10c8","Type":"ContainerDied","Data":"8e7942062f4f5440205d37fcfa661722e1cc50a44e48bf5f92f8f1750096aaeb"} Feb 03 07:30:36 crc kubenswrapper[4708]: I0203 07:30:36.194658 4708 scope.go:117] "RemoveContainer" containerID="7727598660eb74efa92611ece9040b52349c38e62d9a5c1b951af6066d828030" Feb 03 07:30:36 crc kubenswrapper[4708]: 
I0203 07:30:36.194691 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 03 07:30:36 crc kubenswrapper[4708]: I0203 07:30:36.214848 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.214826012 podStartE2EDuration="2.214826012s" podCreationTimestamp="2026-02-03 07:30:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:30:36.208262619 +0000 UTC m=+1215.190209426" watchObservedRunningTime="2026-02-03 07:30:36.214826012 +0000 UTC m=+1215.196772829" Feb 03 07:30:36 crc kubenswrapper[4708]: I0203 07:30:36.230912 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 07:30:36 crc kubenswrapper[4708]: I0203 07:30:36.243347 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 07:30:36 crc kubenswrapper[4708]: I0203 07:30:36.260271 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 07:30:36 crc kubenswrapper[4708]: E0203 07:30:36.260804 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58a6c72b-c4c2-4f2e-9a56-7a94427a10c8" containerName="nova-scheduler-scheduler" Feb 03 07:30:36 crc kubenswrapper[4708]: I0203 07:30:36.260830 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="58a6c72b-c4c2-4f2e-9a56-7a94427a10c8" containerName="nova-scheduler-scheduler" Feb 03 07:30:36 crc kubenswrapper[4708]: I0203 07:30:36.261098 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="58a6c72b-c4c2-4f2e-9a56-7a94427a10c8" containerName="nova-scheduler-scheduler" Feb 03 07:30:36 crc kubenswrapper[4708]: I0203 07:30:36.261891 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 03 07:30:36 crc kubenswrapper[4708]: I0203 07:30:36.264238 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Feb 03 07:30:36 crc kubenswrapper[4708]: I0203 07:30:36.271520 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 07:30:36 crc kubenswrapper[4708]: I0203 07:30:36.397515 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08e414ef-4cf0-42c0-931b-299c7ff1a7a9-config-data\") pod \"nova-scheduler-0\" (UID: \"08e414ef-4cf0-42c0-931b-299c7ff1a7a9\") " pod="openstack/nova-scheduler-0" Feb 03 07:30:36 crc kubenswrapper[4708]: I0203 07:30:36.397590 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08e414ef-4cf0-42c0-931b-299c7ff1a7a9-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"08e414ef-4cf0-42c0-931b-299c7ff1a7a9\") " pod="openstack/nova-scheduler-0" Feb 03 07:30:36 crc kubenswrapper[4708]: I0203 07:30:36.397913 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2d2l\" (UniqueName: \"kubernetes.io/projected/08e414ef-4cf0-42c0-931b-299c7ff1a7a9-kube-api-access-d2d2l\") pod \"nova-scheduler-0\" (UID: \"08e414ef-4cf0-42c0-931b-299c7ff1a7a9\") " pod="openstack/nova-scheduler-0" Feb 03 07:30:36 crc kubenswrapper[4708]: I0203 07:30:36.499564 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2d2l\" (UniqueName: \"kubernetes.io/projected/08e414ef-4cf0-42c0-931b-299c7ff1a7a9-kube-api-access-d2d2l\") pod \"nova-scheduler-0\" (UID: \"08e414ef-4cf0-42c0-931b-299c7ff1a7a9\") " pod="openstack/nova-scheduler-0" Feb 03 07:30:36 crc kubenswrapper[4708]: I0203 07:30:36.499713 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08e414ef-4cf0-42c0-931b-299c7ff1a7a9-config-data\") pod \"nova-scheduler-0\" (UID: \"08e414ef-4cf0-42c0-931b-299c7ff1a7a9\") " pod="openstack/nova-scheduler-0" Feb 03 07:30:36 crc kubenswrapper[4708]: I0203 07:30:36.499741 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08e414ef-4cf0-42c0-931b-299c7ff1a7a9-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"08e414ef-4cf0-42c0-931b-299c7ff1a7a9\") " pod="openstack/nova-scheduler-0" Feb 03 07:30:36 crc kubenswrapper[4708]: I0203 07:30:36.505684 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08e414ef-4cf0-42c0-931b-299c7ff1a7a9-config-data\") pod \"nova-scheduler-0\" (UID: \"08e414ef-4cf0-42c0-931b-299c7ff1a7a9\") " pod="openstack/nova-scheduler-0" Feb 03 07:30:36 crc kubenswrapper[4708]: I0203 07:30:36.506331 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08e414ef-4cf0-42c0-931b-299c7ff1a7a9-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"08e414ef-4cf0-42c0-931b-299c7ff1a7a9\") " pod="openstack/nova-scheduler-0" Feb 03 07:30:36 crc kubenswrapper[4708]: I0203 07:30:36.523006 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2d2l\" (UniqueName: 
\"kubernetes.io/projected/08e414ef-4cf0-42c0-931b-299c7ff1a7a9-kube-api-access-d2d2l\") pod \"nova-scheduler-0\" (UID: \"08e414ef-4cf0-42c0-931b-299c7ff1a7a9\") " pod="openstack/nova-scheduler-0" Feb 03 07:30:36 crc kubenswrapper[4708]: I0203 07:30:36.580471 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 03 07:30:37 crc kubenswrapper[4708]: I0203 07:30:37.057123 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 07:30:37 crc kubenswrapper[4708]: W0203 07:30:37.059451 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod08e414ef_4cf0_42c0_931b_299c7ff1a7a9.slice/crio-0f49351f84fc55aff2539525f0be1e87f088eb407819b58effeda4f179b6bf2b WatchSource:0}: Error finding container 0f49351f84fc55aff2539525f0be1e87f088eb407819b58effeda4f179b6bf2b: Status 404 returned error can't find the container with id 0f49351f84fc55aff2539525f0be1e87f088eb407819b58effeda4f179b6bf2b Feb 03 07:30:37 crc kubenswrapper[4708]: I0203 07:30:37.206945 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"08e414ef-4cf0-42c0-931b-299c7ff1a7a9","Type":"ContainerStarted","Data":"0f49351f84fc55aff2539525f0be1e87f088eb407819b58effeda4f179b6bf2b"} Feb 03 07:30:38 crc kubenswrapper[4708]: I0203 07:30:38.106879 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58a6c72b-c4c2-4f2e-9a56-7a94427a10c8" path="/var/lib/kubelet/pods/58a6c72b-c4c2-4f2e-9a56-7a94427a10c8/volumes" Feb 03 07:30:38 crc kubenswrapper[4708]: I0203 07:30:38.219392 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"08e414ef-4cf0-42c0-931b-299c7ff1a7a9","Type":"ContainerStarted","Data":"a8fa666489f0dc74553b7529cf755f837b9653ce8a11c8aa7b423c305e1aae34"} Feb 03 07:30:38 crc kubenswrapper[4708]: I0203 07:30:38.257755 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.257731134 podStartE2EDuration="2.257731134s" podCreationTimestamp="2026-02-03 07:30:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:30:38.238767014 +0000 UTC m=+1217.220713841" watchObservedRunningTime="2026-02-03 07:30:38.257731134 +0000 UTC m=+1217.239677961" Feb 03 07:30:40 crc kubenswrapper[4708]: I0203 07:30:40.523220 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Feb 03 07:30:41 crc kubenswrapper[4708]: I0203 07:30:41.581081 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Feb 03 07:30:44 crc kubenswrapper[4708]: I0203 07:30:44.895299 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 03 07:30:44 crc kubenswrapper[4708]: I0203 07:30:44.895836 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 03 07:30:45 crc kubenswrapper[4708]: I0203 07:30:45.978037 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="a461a3e7-34d0-428b-b511-adbe5443a526" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.202:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 03 07:30:45 crc kubenswrapper[4708]: 
I0203 07:30:45.978069 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="a461a3e7-34d0-428b-b511-adbe5443a526" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.202:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 03 07:30:46 crc kubenswrapper[4708]: I0203 07:30:46.580874 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Feb 03 07:30:46 crc kubenswrapper[4708]: I0203 07:30:46.630133 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Feb 03 07:30:47 crc kubenswrapper[4708]: I0203 07:30:47.355874 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Feb 03 07:30:50 crc kubenswrapper[4708]: I0203 07:30:50.326959 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.365067 4708 generic.go:334] "Generic (PLEG): container finished" podID="fdc4c642-cd6a-4104-9594-647a3356296a" containerID="d3e3f4f752245ffe166a35e679f85b8e9c0ba5f8fc2490175cf70632d2a70448" exitCode=137 Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.365164 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"fdc4c642-cd6a-4104-9594-647a3356296a","Type":"ContainerDied","Data":"d3e3f4f752245ffe166a35e679f85b8e9c0ba5f8fc2490175cf70632d2a70448"} Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.365624 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"fdc4c642-cd6a-4104-9594-647a3356296a","Type":"ContainerDied","Data":"e0e37e52b8201c837926d023ce9f34c8b4a1de744d4f6c3ef3f64d5231e0b4df"} Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.365642 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e0e37e52b8201c837926d023ce9f34c8b4a1de744d4f6c3ef3f64d5231e0b4df" Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.368651 4708 generic.go:334] "Generic (PLEG): container finished" podID="72179ea9-93e3-4e20-907f-bcf317cd4f80" containerID="05ba5b153a55cc9afb5a266092ec64c2ba5e7619492672973c37f7232fed0dea" exitCode=137 Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.368694 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"72179ea9-93e3-4e20-907f-bcf317cd4f80","Type":"ContainerDied","Data":"05ba5b153a55cc9afb5a266092ec64c2ba5e7619492672973c37f7232fed0dea"} Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.368721 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"72179ea9-93e3-4e20-907f-bcf317cd4f80","Type":"ContainerDied","Data":"8b91bdda83f099f5be1141379c1f18a232f82be1870a0902aa1ec810e43f3d6a"} Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.368732 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8b91bdda83f099f5be1141379c1f18a232f82be1870a0902aa1ec810e43f3d6a" Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.412730 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.424412 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.544227 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72179ea9-93e3-4e20-907f-bcf317cd4f80-logs\") pod \"72179ea9-93e3-4e20-907f-bcf317cd4f80\" (UID: \"72179ea9-93e3-4e20-907f-bcf317cd4f80\") " Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.544301 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hqwpf\" (UniqueName: \"kubernetes.io/projected/fdc4c642-cd6a-4104-9594-647a3356296a-kube-api-access-hqwpf\") pod \"fdc4c642-cd6a-4104-9594-647a3356296a\" (UID: \"fdc4c642-cd6a-4104-9594-647a3356296a\") " Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.544349 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdc4c642-cd6a-4104-9594-647a3356296a-combined-ca-bundle\") pod \"fdc4c642-cd6a-4104-9594-647a3356296a\" (UID: \"fdc4c642-cd6a-4104-9594-647a3356296a\") " Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.544410 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72179ea9-93e3-4e20-907f-bcf317cd4f80-combined-ca-bundle\") pod \"72179ea9-93e3-4e20-907f-bcf317cd4f80\" (UID: \"72179ea9-93e3-4e20-907f-bcf317cd4f80\") " Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.544440 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fdc4c642-cd6a-4104-9594-647a3356296a-config-data\") pod \"fdc4c642-cd6a-4104-9594-647a3356296a\" (UID: \"fdc4c642-cd6a-4104-9594-647a3356296a\") " Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.544465 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6shb5\" (UniqueName: \"kubernetes.io/projected/72179ea9-93e3-4e20-907f-bcf317cd4f80-kube-api-access-6shb5\") pod \"72179ea9-93e3-4e20-907f-bcf317cd4f80\" (UID: \"72179ea9-93e3-4e20-907f-bcf317cd4f80\") " Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.544587 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72179ea9-93e3-4e20-907f-bcf317cd4f80-config-data\") pod \"72179ea9-93e3-4e20-907f-bcf317cd4f80\" (UID: \"72179ea9-93e3-4e20-907f-bcf317cd4f80\") " Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.544723 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72179ea9-93e3-4e20-907f-bcf317cd4f80-logs" (OuterVolumeSpecName: "logs") pod "72179ea9-93e3-4e20-907f-bcf317cd4f80" (UID: "72179ea9-93e3-4e20-907f-bcf317cd4f80"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.545981 4708 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72179ea9-93e3-4e20-907f-bcf317cd4f80-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.552729 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdc4c642-cd6a-4104-9594-647a3356296a-kube-api-access-hqwpf" (OuterVolumeSpecName: "kube-api-access-hqwpf") pod "fdc4c642-cd6a-4104-9594-647a3356296a" (UID: "fdc4c642-cd6a-4104-9594-647a3356296a"). InnerVolumeSpecName "kube-api-access-hqwpf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.553461 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72179ea9-93e3-4e20-907f-bcf317cd4f80-kube-api-access-6shb5" (OuterVolumeSpecName: "kube-api-access-6shb5") pod "72179ea9-93e3-4e20-907f-bcf317cd4f80" (UID: "72179ea9-93e3-4e20-907f-bcf317cd4f80"). InnerVolumeSpecName "kube-api-access-6shb5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.573181 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72179ea9-93e3-4e20-907f-bcf317cd4f80-config-data" (OuterVolumeSpecName: "config-data") pod "72179ea9-93e3-4e20-907f-bcf317cd4f80" (UID: "72179ea9-93e3-4e20-907f-bcf317cd4f80"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.575875 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdc4c642-cd6a-4104-9594-647a3356296a-config-data" (OuterVolumeSpecName: "config-data") pod "fdc4c642-cd6a-4104-9594-647a3356296a" (UID: "fdc4c642-cd6a-4104-9594-647a3356296a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.581787 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdc4c642-cd6a-4104-9594-647a3356296a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fdc4c642-cd6a-4104-9594-647a3356296a" (UID: "fdc4c642-cd6a-4104-9594-647a3356296a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.582662 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72179ea9-93e3-4e20-907f-bcf317cd4f80-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "72179ea9-93e3-4e20-907f-bcf317cd4f80" (UID: "72179ea9-93e3-4e20-907f-bcf317cd4f80"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.647828 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hqwpf\" (UniqueName: \"kubernetes.io/projected/fdc4c642-cd6a-4104-9594-647a3356296a-kube-api-access-hqwpf\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.647871 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdc4c642-cd6a-4104-9594-647a3356296a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.647883 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72179ea9-93e3-4e20-907f-bcf317cd4f80-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.647894 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fdc4c642-cd6a-4104-9594-647a3356296a-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.647906 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6shb5\" (UniqueName: \"kubernetes.io/projected/72179ea9-93e3-4e20-907f-bcf317cd4f80-kube-api-access-6shb5\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.647917 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72179ea9-93e3-4e20-907f-bcf317cd4f80-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.803554 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.804130 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="d79b40f9-049c-46ea-8ade-f43e58bc8cd4" containerName="kube-state-metrics" containerID="cri-o://19d393ec5187d10badc94368b0b3326e9c23b758741b65c1a26cd19eb27f32b3" gracePeriod=30 Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.833545 4708 patch_prober.go:28] interesting pod/machine-config-daemon-r94bn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:30:53 crc kubenswrapper[4708]: I0203 07:30:53.833622 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.264693 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.360584 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mx6p5\" (UniqueName: \"kubernetes.io/projected/d79b40f9-049c-46ea-8ade-f43e58bc8cd4-kube-api-access-mx6p5\") pod \"d79b40f9-049c-46ea-8ade-f43e58bc8cd4\" (UID: \"d79b40f9-049c-46ea-8ade-f43e58bc8cd4\") " Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.365730 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d79b40f9-049c-46ea-8ade-f43e58bc8cd4-kube-api-access-mx6p5" (OuterVolumeSpecName: "kube-api-access-mx6p5") pod "d79b40f9-049c-46ea-8ade-f43e58bc8cd4" (UID: "d79b40f9-049c-46ea-8ade-f43e58bc8cd4"). InnerVolumeSpecName "kube-api-access-mx6p5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.378758 4708 generic.go:334] "Generic (PLEG): container finished" podID="d79b40f9-049c-46ea-8ade-f43e58bc8cd4" containerID="19d393ec5187d10badc94368b0b3326e9c23b758741b65c1a26cd19eb27f32b3" exitCode=2 Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.378839 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.378865 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.378885 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d79b40f9-049c-46ea-8ade-f43e58bc8cd4","Type":"ContainerDied","Data":"19d393ec5187d10badc94368b0b3326e9c23b758741b65c1a26cd19eb27f32b3"} Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.378942 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d79b40f9-049c-46ea-8ade-f43e58bc8cd4","Type":"ContainerDied","Data":"84179dc416c94c5131983ff97416f553c165cf9deff138a5c5b699e8433a6643"} Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.378896 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.378985 4708 scope.go:117] "RemoveContainer" containerID="19d393ec5187d10badc94368b0b3326e9c23b758741b65c1a26cd19eb27f32b3" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.437462 4708 scope.go:117] "RemoveContainer" containerID="19d393ec5187d10badc94368b0b3326e9c23b758741b65c1a26cd19eb27f32b3" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.437558 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 03 07:30:54 crc kubenswrapper[4708]: E0203 07:30:54.444522 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19d393ec5187d10badc94368b0b3326e9c23b758741b65c1a26cd19eb27f32b3\": container with ID starting with 19d393ec5187d10badc94368b0b3326e9c23b758741b65c1a26cd19eb27f32b3 not found: ID does not exist" containerID="19d393ec5187d10badc94368b0b3326e9c23b758741b65c1a26cd19eb27f32b3" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.444578 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19d393ec5187d10badc94368b0b3326e9c23b758741b65c1a26cd19eb27f32b3"} err="failed to get container status \"19d393ec5187d10badc94368b0b3326e9c23b758741b65c1a26cd19eb27f32b3\": rpc error: code = NotFound desc = could not find container \"19d393ec5187d10badc94368b0b3326e9c23b758741b65c1a26cd19eb27f32b3\": container with ID starting with 19d393ec5187d10badc94368b0b3326e9c23b758741b65c1a26cd19eb27f32b3 not found: ID does not exist" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.459728 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.462925 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mx6p5\" (UniqueName: \"kubernetes.io/projected/d79b40f9-049c-46ea-8ade-f43e58bc8cd4-kube-api-access-mx6p5\") on node \"crc\" DevicePath \"\"" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.469069 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.476471 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.488423 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 03 07:30:54 crc kubenswrapper[4708]: E0203 07:30:54.488930 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d79b40f9-049c-46ea-8ade-f43e58bc8cd4" containerName="kube-state-metrics" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.488956 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="d79b40f9-049c-46ea-8ade-f43e58bc8cd4" containerName="kube-state-metrics" Feb 03 07:30:54 crc kubenswrapper[4708]: E0203 07:30:54.488995 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdc4c642-cd6a-4104-9594-647a3356296a" containerName="nova-cell1-novncproxy-novncproxy" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.489004 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdc4c642-cd6a-4104-9594-647a3356296a" containerName="nova-cell1-novncproxy-novncproxy" Feb 03 07:30:54 crc kubenswrapper[4708]: E0203 07:30:54.489025 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72179ea9-93e3-4e20-907f-bcf317cd4f80" 
containerName="nova-metadata-metadata" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.489033 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="72179ea9-93e3-4e20-907f-bcf317cd4f80" containerName="nova-metadata-metadata" Feb 03 07:30:54 crc kubenswrapper[4708]: E0203 07:30:54.489056 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72179ea9-93e3-4e20-907f-bcf317cd4f80" containerName="nova-metadata-log" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.489063 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="72179ea9-93e3-4e20-907f-bcf317cd4f80" containerName="nova-metadata-log" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.489269 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="72179ea9-93e3-4e20-907f-bcf317cd4f80" containerName="nova-metadata-metadata" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.489297 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="d79b40f9-049c-46ea-8ade-f43e58bc8cd4" containerName="kube-state-metrics" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.489309 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="72179ea9-93e3-4e20-907f-bcf317cd4f80" containerName="nova-metadata-log" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.489325 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdc4c642-cd6a-4104-9594-647a3356296a" containerName="nova-cell1-novncproxy-novncproxy" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.490277 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.495391 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.512187 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.512244 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.520281 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.535172 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.540304 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.541210 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.563203 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.571554 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fde60da0-56e9-4d52-b602-8060c10dfb5a-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"fde60da0-56e9-4d52-b602-8060c10dfb5a\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.571928 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v74kv\" (UniqueName: \"kubernetes.io/projected/fde60da0-56e9-4d52-b602-8060c10dfb5a-kube-api-access-v74kv\") pod \"nova-cell1-novncproxy-0\" (UID: \"fde60da0-56e9-4d52-b602-8060c10dfb5a\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.572024 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/fde60da0-56e9-4d52-b602-8060c10dfb5a-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"fde60da0-56e9-4d52-b602-8060c10dfb5a\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.572136 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fde60da0-56e9-4d52-b602-8060c10dfb5a-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"fde60da0-56e9-4d52-b602-8060c10dfb5a\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.572249 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/fde60da0-56e9-4d52-b602-8060c10dfb5a-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"fde60da0-56e9-4d52-b602-8060c10dfb5a\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.582198 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.591913 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.601990 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.613265 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.614852 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.616444 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.616445 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.625237 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.673628 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vd57l\" (UniqueName: \"kubernetes.io/projected/e4d6ad9e-1d8f-4d13-a3ae-6e3a283fc697-kube-api-access-vd57l\") pod \"kube-state-metrics-0\" (UID: \"e4d6ad9e-1d8f-4d13-a3ae-6e3a283fc697\") " pod="openstack/kube-state-metrics-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.673701 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ccf3d30-d7b0-48cf-946f-4ba5154fefdf-logs\") pod \"nova-metadata-0\" (UID: \"3ccf3d30-d7b0-48cf-946f-4ba5154fefdf\") " pod="openstack/nova-metadata-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.673726 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4d6ad9e-1d8f-4d13-a3ae-6e3a283fc697-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"e4d6ad9e-1d8f-4d13-a3ae-6e3a283fc697\") " pod="openstack/kube-state-metrics-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.673750 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62wrc\" (UniqueName: \"kubernetes.io/projected/3ccf3d30-d7b0-48cf-946f-4ba5154fefdf-kube-api-access-62wrc\") pod \"nova-metadata-0\" (UID: \"3ccf3d30-d7b0-48cf-946f-4ba5154fefdf\") " pod="openstack/nova-metadata-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.673783 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v74kv\" (UniqueName: \"kubernetes.io/projected/fde60da0-56e9-4d52-b602-8060c10dfb5a-kube-api-access-v74kv\") pod \"nova-cell1-novncproxy-0\" (UID: \"fde60da0-56e9-4d52-b602-8060c10dfb5a\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.673824 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ccf3d30-d7b0-48cf-946f-4ba5154fefdf-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"3ccf3d30-d7b0-48cf-946f-4ba5154fefdf\") " pod="openstack/nova-metadata-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.673850 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ccf3d30-d7b0-48cf-946f-4ba5154fefdf-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3ccf3d30-d7b0-48cf-946f-4ba5154fefdf\") " pod="openstack/nova-metadata-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.673869 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/fde60da0-56e9-4d52-b602-8060c10dfb5a-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"fde60da0-56e9-4d52-b602-8060c10dfb5a\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.673892 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/e4d6ad9e-1d8f-4d13-a3ae-6e3a283fc697-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"e4d6ad9e-1d8f-4d13-a3ae-6e3a283fc697\") " pod="openstack/kube-state-metrics-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.673908 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4d6ad9e-1d8f-4d13-a3ae-6e3a283fc697-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"e4d6ad9e-1d8f-4d13-a3ae-6e3a283fc697\") " pod="openstack/kube-state-metrics-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.673933 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fde60da0-56e9-4d52-b602-8060c10dfb5a-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"fde60da0-56e9-4d52-b602-8060c10dfb5a\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.673972 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/fde60da0-56e9-4d52-b602-8060c10dfb5a-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"fde60da0-56e9-4d52-b602-8060c10dfb5a\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.673993 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ccf3d30-d7b0-48cf-946f-4ba5154fefdf-config-data\") pod \"nova-metadata-0\" (UID: \"3ccf3d30-d7b0-48cf-946f-4ba5154fefdf\") " pod="openstack/nova-metadata-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.674017 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fde60da0-56e9-4d52-b602-8060c10dfb5a-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"fde60da0-56e9-4d52-b602-8060c10dfb5a\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.678599 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fde60da0-56e9-4d52-b602-8060c10dfb5a-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"fde60da0-56e9-4d52-b602-8060c10dfb5a\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.679146 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/fde60da0-56e9-4d52-b602-8060c10dfb5a-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"fde60da0-56e9-4d52-b602-8060c10dfb5a\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.691027 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fde60da0-56e9-4d52-b602-8060c10dfb5a-combined-ca-bundle\") pod 
\"nova-cell1-novncproxy-0\" (UID: \"fde60da0-56e9-4d52-b602-8060c10dfb5a\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.691364 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/fde60da0-56e9-4d52-b602-8060c10dfb5a-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"fde60da0-56e9-4d52-b602-8060c10dfb5a\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.693110 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v74kv\" (UniqueName: \"kubernetes.io/projected/fde60da0-56e9-4d52-b602-8060c10dfb5a-kube-api-access-v74kv\") pod \"nova-cell1-novncproxy-0\" (UID: \"fde60da0-56e9-4d52-b602-8060c10dfb5a\") " pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.775677 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ccf3d30-d7b0-48cf-946f-4ba5154fefdf-config-data\") pod \"nova-metadata-0\" (UID: \"3ccf3d30-d7b0-48cf-946f-4ba5154fefdf\") " pod="openstack/nova-metadata-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.775746 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vd57l\" (UniqueName: \"kubernetes.io/projected/e4d6ad9e-1d8f-4d13-a3ae-6e3a283fc697-kube-api-access-vd57l\") pod \"kube-state-metrics-0\" (UID: \"e4d6ad9e-1d8f-4d13-a3ae-6e3a283fc697\") " pod="openstack/kube-state-metrics-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.775810 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ccf3d30-d7b0-48cf-946f-4ba5154fefdf-logs\") pod \"nova-metadata-0\" (UID: \"3ccf3d30-d7b0-48cf-946f-4ba5154fefdf\") " pod="openstack/nova-metadata-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.775854 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4d6ad9e-1d8f-4d13-a3ae-6e3a283fc697-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"e4d6ad9e-1d8f-4d13-a3ae-6e3a283fc697\") " pod="openstack/kube-state-metrics-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.775878 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62wrc\" (UniqueName: \"kubernetes.io/projected/3ccf3d30-d7b0-48cf-946f-4ba5154fefdf-kube-api-access-62wrc\") pod \"nova-metadata-0\" (UID: \"3ccf3d30-d7b0-48cf-946f-4ba5154fefdf\") " pod="openstack/nova-metadata-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.775907 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ccf3d30-d7b0-48cf-946f-4ba5154fefdf-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"3ccf3d30-d7b0-48cf-946f-4ba5154fefdf\") " pod="openstack/nova-metadata-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.775934 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ccf3d30-d7b0-48cf-946f-4ba5154fefdf-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3ccf3d30-d7b0-48cf-946f-4ba5154fefdf\") " pod="openstack/nova-metadata-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.775957 4708 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4d6ad9e-1d8f-4d13-a3ae-6e3a283fc697-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"e4d6ad9e-1d8f-4d13-a3ae-6e3a283fc697\") " pod="openstack/kube-state-metrics-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.775971 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/e4d6ad9e-1d8f-4d13-a3ae-6e3a283fc697-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"e4d6ad9e-1d8f-4d13-a3ae-6e3a283fc697\") " pod="openstack/kube-state-metrics-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.776837 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ccf3d30-d7b0-48cf-946f-4ba5154fefdf-logs\") pod \"nova-metadata-0\" (UID: \"3ccf3d30-d7b0-48cf-946f-4ba5154fefdf\") " pod="openstack/nova-metadata-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.779367 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/e4d6ad9e-1d8f-4d13-a3ae-6e3a283fc697-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"e4d6ad9e-1d8f-4d13-a3ae-6e3a283fc697\") " pod="openstack/kube-state-metrics-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.779407 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ccf3d30-d7b0-48cf-946f-4ba5154fefdf-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"3ccf3d30-d7b0-48cf-946f-4ba5154fefdf\") " pod="openstack/nova-metadata-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.780193 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ccf3d30-d7b0-48cf-946f-4ba5154fefdf-config-data\") pod \"nova-metadata-0\" (UID: \"3ccf3d30-d7b0-48cf-946f-4ba5154fefdf\") " pod="openstack/nova-metadata-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.780510 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4d6ad9e-1d8f-4d13-a3ae-6e3a283fc697-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"e4d6ad9e-1d8f-4d13-a3ae-6e3a283fc697\") " pod="openstack/kube-state-metrics-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.780874 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4d6ad9e-1d8f-4d13-a3ae-6e3a283fc697-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"e4d6ad9e-1d8f-4d13-a3ae-6e3a283fc697\") " pod="openstack/kube-state-metrics-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.781834 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ccf3d30-d7b0-48cf-946f-4ba5154fefdf-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3ccf3d30-d7b0-48cf-946f-4ba5154fefdf\") " pod="openstack/nova-metadata-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.793296 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vd57l\" (UniqueName: \"kubernetes.io/projected/e4d6ad9e-1d8f-4d13-a3ae-6e3a283fc697-kube-api-access-vd57l\") pod 
\"kube-state-metrics-0\" (UID: \"e4d6ad9e-1d8f-4d13-a3ae-6e3a283fc697\") " pod="openstack/kube-state-metrics-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.793406 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62wrc\" (UniqueName: \"kubernetes.io/projected/3ccf3d30-d7b0-48cf-946f-4ba5154fefdf-kube-api-access-62wrc\") pod \"nova-metadata-0\" (UID: \"3ccf3d30-d7b0-48cf-946f-4ba5154fefdf\") " pod="openstack/nova-metadata-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.864548 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.875708 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.899197 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.900505 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.903057 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.909491 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 03 07:30:54 crc kubenswrapper[4708]: I0203 07:30:54.933143 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.354175 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 03 07:30:55 crc kubenswrapper[4708]: W0203 07:30:55.357989 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4d6ad9e_1d8f_4d13_a3ae_6e3a283fc697.slice/crio-dc2e6b409e688b6ba66e116e24161ec7e8d77e1551be4dc62581e4ff7c65e010 WatchSource:0}: Error finding container dc2e6b409e688b6ba66e116e24161ec7e8d77e1551be4dc62581e4ff7c65e010: Status 404 returned error can't find the container with id dc2e6b409e688b6ba66e116e24161ec7e8d77e1551be4dc62581e4ff7c65e010 Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.393235 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e4d6ad9e-1d8f-4d13-a3ae-6e3a283fc697","Type":"ContainerStarted","Data":"dc2e6b409e688b6ba66e116e24161ec7e8d77e1551be4dc62581e4ff7c65e010"} Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.394219 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.407131 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.441769 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 03 07:30:55 crc kubenswrapper[4708]: W0203 07:30:55.447694 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfde60da0_56e9_4d52_b602_8060c10dfb5a.slice/crio-578774d2565e0037c9ae334e2f0ae623c155a0d9956d43af4d7d3a5164bc4b1a WatchSource:0}: Error finding container 
578774d2565e0037c9ae334e2f0ae623c155a0d9956d43af4d7d3a5164bc4b1a: Status 404 returned error can't find the container with id 578774d2565e0037c9ae334e2f0ae623c155a0d9956d43af4d7d3a5164bc4b1a Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.540034 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 03 07:30:55 crc kubenswrapper[4708]: W0203 07:30:55.549018 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3ccf3d30_d7b0_48cf_946f_4ba5154fefdf.slice/crio-2bb59a9576007bcab77a792371a53b83bd277db8a3f8080b8a6c4213502427e0 WatchSource:0}: Error finding container 2bb59a9576007bcab77a792371a53b83bd277db8a3f8080b8a6c4213502427e0: Status 404 returned error can't find the container with id 2bb59a9576007bcab77a792371a53b83bd277db8a3f8080b8a6c4213502427e0 Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.606431 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-86mmh"] Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.608534 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-86mmh" Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.630352 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-86mmh"] Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.694614 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b0112ed3-3c81-4e21-ae47-89c473987dec-ovsdbserver-sb\") pod \"dnsmasq-dns-89c5cd4d5-86mmh\" (UID: \"b0112ed3-3c81-4e21-ae47-89c473987dec\") " pod="openstack/dnsmasq-dns-89c5cd4d5-86mmh" Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.695024 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b0112ed3-3c81-4e21-ae47-89c473987dec-dns-swift-storage-0\") pod \"dnsmasq-dns-89c5cd4d5-86mmh\" (UID: \"b0112ed3-3c81-4e21-ae47-89c473987dec\") " pod="openstack/dnsmasq-dns-89c5cd4d5-86mmh" Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.695134 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0112ed3-3c81-4e21-ae47-89c473987dec-config\") pod \"dnsmasq-dns-89c5cd4d5-86mmh\" (UID: \"b0112ed3-3c81-4e21-ae47-89c473987dec\") " pod="openstack/dnsmasq-dns-89c5cd4d5-86mmh" Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.695382 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b0112ed3-3c81-4e21-ae47-89c473987dec-dns-svc\") pod \"dnsmasq-dns-89c5cd4d5-86mmh\" (UID: \"b0112ed3-3c81-4e21-ae47-89c473987dec\") " pod="openstack/dnsmasq-dns-89c5cd4d5-86mmh" Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.695442 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9666s\" (UniqueName: \"kubernetes.io/projected/b0112ed3-3c81-4e21-ae47-89c473987dec-kube-api-access-9666s\") pod \"dnsmasq-dns-89c5cd4d5-86mmh\" (UID: \"b0112ed3-3c81-4e21-ae47-89c473987dec\") " pod="openstack/dnsmasq-dns-89c5cd4d5-86mmh" Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.695544 4708 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b0112ed3-3c81-4e21-ae47-89c473987dec-ovsdbserver-nb\") pod \"dnsmasq-dns-89c5cd4d5-86mmh\" (UID: \"b0112ed3-3c81-4e21-ae47-89c473987dec\") " pod="openstack/dnsmasq-dns-89c5cd4d5-86mmh" Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.797153 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b0112ed3-3c81-4e21-ae47-89c473987dec-dns-svc\") pod \"dnsmasq-dns-89c5cd4d5-86mmh\" (UID: \"b0112ed3-3c81-4e21-ae47-89c473987dec\") " pod="openstack/dnsmasq-dns-89c5cd4d5-86mmh" Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.797192 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9666s\" (UniqueName: \"kubernetes.io/projected/b0112ed3-3c81-4e21-ae47-89c473987dec-kube-api-access-9666s\") pod \"dnsmasq-dns-89c5cd4d5-86mmh\" (UID: \"b0112ed3-3c81-4e21-ae47-89c473987dec\") " pod="openstack/dnsmasq-dns-89c5cd4d5-86mmh" Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.797230 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b0112ed3-3c81-4e21-ae47-89c473987dec-ovsdbserver-nb\") pod \"dnsmasq-dns-89c5cd4d5-86mmh\" (UID: \"b0112ed3-3c81-4e21-ae47-89c473987dec\") " pod="openstack/dnsmasq-dns-89c5cd4d5-86mmh" Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.797265 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b0112ed3-3c81-4e21-ae47-89c473987dec-ovsdbserver-sb\") pod \"dnsmasq-dns-89c5cd4d5-86mmh\" (UID: \"b0112ed3-3c81-4e21-ae47-89c473987dec\") " pod="openstack/dnsmasq-dns-89c5cd4d5-86mmh" Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.797318 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b0112ed3-3c81-4e21-ae47-89c473987dec-dns-swift-storage-0\") pod \"dnsmasq-dns-89c5cd4d5-86mmh\" (UID: \"b0112ed3-3c81-4e21-ae47-89c473987dec\") " pod="openstack/dnsmasq-dns-89c5cd4d5-86mmh" Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.797361 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0112ed3-3c81-4e21-ae47-89c473987dec-config\") pod \"dnsmasq-dns-89c5cd4d5-86mmh\" (UID: \"b0112ed3-3c81-4e21-ae47-89c473987dec\") " pod="openstack/dnsmasq-dns-89c5cd4d5-86mmh" Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.798186 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0112ed3-3c81-4e21-ae47-89c473987dec-config\") pod \"dnsmasq-dns-89c5cd4d5-86mmh\" (UID: \"b0112ed3-3c81-4e21-ae47-89c473987dec\") " pod="openstack/dnsmasq-dns-89c5cd4d5-86mmh" Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.798663 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b0112ed3-3c81-4e21-ae47-89c473987dec-dns-svc\") pod \"dnsmasq-dns-89c5cd4d5-86mmh\" (UID: \"b0112ed3-3c81-4e21-ae47-89c473987dec\") " pod="openstack/dnsmasq-dns-89c5cd4d5-86mmh" Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.799595 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/b0112ed3-3c81-4e21-ae47-89c473987dec-ovsdbserver-nb\") pod \"dnsmasq-dns-89c5cd4d5-86mmh\" (UID: \"b0112ed3-3c81-4e21-ae47-89c473987dec\") " pod="openstack/dnsmasq-dns-89c5cd4d5-86mmh" Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.800157 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b0112ed3-3c81-4e21-ae47-89c473987dec-ovsdbserver-sb\") pod \"dnsmasq-dns-89c5cd4d5-86mmh\" (UID: \"b0112ed3-3c81-4e21-ae47-89c473987dec\") " pod="openstack/dnsmasq-dns-89c5cd4d5-86mmh" Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.800635 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b0112ed3-3c81-4e21-ae47-89c473987dec-dns-swift-storage-0\") pod \"dnsmasq-dns-89c5cd4d5-86mmh\" (UID: \"b0112ed3-3c81-4e21-ae47-89c473987dec\") " pod="openstack/dnsmasq-dns-89c5cd4d5-86mmh" Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.818467 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9666s\" (UniqueName: \"kubernetes.io/projected/b0112ed3-3c81-4e21-ae47-89c473987dec-kube-api-access-9666s\") pod \"dnsmasq-dns-89c5cd4d5-86mmh\" (UID: \"b0112ed3-3c81-4e21-ae47-89c473987dec\") " pod="openstack/dnsmasq-dns-89c5cd4d5-86mmh" Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.825096 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.825403 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="19739ef7-2ac6-46b5-84fb-308b16671ecc" containerName="ceilometer-central-agent" containerID="cri-o://a731d76dfd3e1491ecf9d2574ad19628f8b8d200fe02236b5d5216f74dfd7e00" gracePeriod=30 Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.825979 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="19739ef7-2ac6-46b5-84fb-308b16671ecc" containerName="proxy-httpd" containerID="cri-o://baef3af92ed5808a9fbdc2896f0b3d5eab08cec0ab5fa1e0ff40c3b105d40deb" gracePeriod=30 Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.826088 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="19739ef7-2ac6-46b5-84fb-308b16671ecc" containerName="sg-core" containerID="cri-o://961b214c19b1a9b102099f7d89b8f43892b409daac635b8d6c905f95706420db" gracePeriod=30 Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.826177 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="19739ef7-2ac6-46b5-84fb-308b16671ecc" containerName="ceilometer-notification-agent" containerID="cri-o://5d0e6f2364c854eb3c38730b61ca6f52a8591bd813bc300466789bf1b07a830d" gracePeriod=30 Feb 03 07:30:55 crc kubenswrapper[4708]: I0203 07:30:55.941841 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-86mmh" Feb 03 07:30:56 crc kubenswrapper[4708]: I0203 07:30:56.126602 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72179ea9-93e3-4e20-907f-bcf317cd4f80" path="/var/lib/kubelet/pods/72179ea9-93e3-4e20-907f-bcf317cd4f80/volumes" Feb 03 07:30:56 crc kubenswrapper[4708]: I0203 07:30:56.128006 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d79b40f9-049c-46ea-8ade-f43e58bc8cd4" path="/var/lib/kubelet/pods/d79b40f9-049c-46ea-8ade-f43e58bc8cd4/volumes" Feb 03 07:30:56 crc kubenswrapper[4708]: I0203 07:30:56.128573 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fdc4c642-cd6a-4104-9594-647a3356296a" path="/var/lib/kubelet/pods/fdc4c642-cd6a-4104-9594-647a3356296a/volumes" Feb 03 07:30:56 crc kubenswrapper[4708]: I0203 07:30:56.402701 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"fde60da0-56e9-4d52-b602-8060c10dfb5a","Type":"ContainerStarted","Data":"df721917d56fb89fcac957dcacdda745bf0ffde9708eabac3c2fcfaa4caf1037"} Feb 03 07:30:56 crc kubenswrapper[4708]: I0203 07:30:56.402766 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"fde60da0-56e9-4d52-b602-8060c10dfb5a","Type":"ContainerStarted","Data":"578774d2565e0037c9ae334e2f0ae623c155a0d9956d43af4d7d3a5164bc4b1a"} Feb 03 07:30:56 crc kubenswrapper[4708]: I0203 07:30:56.404399 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e4d6ad9e-1d8f-4d13-a3ae-6e3a283fc697","Type":"ContainerStarted","Data":"cccb0860eb489ac7a1abc45293a4263ea78f24e52bf1d540f605ae7e9b5c0a7b"} Feb 03 07:30:56 crc kubenswrapper[4708]: I0203 07:30:56.404502 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Feb 03 07:30:56 crc kubenswrapper[4708]: I0203 07:30:56.411018 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3ccf3d30-d7b0-48cf-946f-4ba5154fefdf","Type":"ContainerStarted","Data":"b637062e287b3a9d9da7a42f869927094238cfc3758f6ce4455b1d43810a7178"} Feb 03 07:30:56 crc kubenswrapper[4708]: I0203 07:30:56.411058 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3ccf3d30-d7b0-48cf-946f-4ba5154fefdf","Type":"ContainerStarted","Data":"497efe5a8956ca537466620d96a5228c8c12f319ac4616d1cce2cca596452583"} Feb 03 07:30:56 crc kubenswrapper[4708]: I0203 07:30:56.411074 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3ccf3d30-d7b0-48cf-946f-4ba5154fefdf","Type":"ContainerStarted","Data":"2bb59a9576007bcab77a792371a53b83bd277db8a3f8080b8a6c4213502427e0"} Feb 03 07:30:56 crc kubenswrapper[4708]: I0203 07:30:56.425288 4708 generic.go:334] "Generic (PLEG): container finished" podID="19739ef7-2ac6-46b5-84fb-308b16671ecc" containerID="baef3af92ed5808a9fbdc2896f0b3d5eab08cec0ab5fa1e0ff40c3b105d40deb" exitCode=0 Feb 03 07:30:56 crc kubenswrapper[4708]: I0203 07:30:56.425640 4708 generic.go:334] "Generic (PLEG): container finished" podID="19739ef7-2ac6-46b5-84fb-308b16671ecc" containerID="961b214c19b1a9b102099f7d89b8f43892b409daac635b8d6c905f95706420db" exitCode=2 Feb 03 07:30:56 crc kubenswrapper[4708]: I0203 07:30:56.425653 4708 generic.go:334] "Generic (PLEG): container finished" podID="19739ef7-2ac6-46b5-84fb-308b16671ecc" 
containerID="a731d76dfd3e1491ecf9d2574ad19628f8b8d200fe02236b5d5216f74dfd7e00" exitCode=0 Feb 03 07:30:56 crc kubenswrapper[4708]: I0203 07:30:56.425555 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19739ef7-2ac6-46b5-84fb-308b16671ecc","Type":"ContainerDied","Data":"baef3af92ed5808a9fbdc2896f0b3d5eab08cec0ab5fa1e0ff40c3b105d40deb"} Feb 03 07:30:56 crc kubenswrapper[4708]: I0203 07:30:56.425970 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19739ef7-2ac6-46b5-84fb-308b16671ecc","Type":"ContainerDied","Data":"961b214c19b1a9b102099f7d89b8f43892b409daac635b8d6c905f95706420db"} Feb 03 07:30:56 crc kubenswrapper[4708]: I0203 07:30:56.426019 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19739ef7-2ac6-46b5-84fb-308b16671ecc","Type":"ContainerDied","Data":"a731d76dfd3e1491ecf9d2574ad19628f8b8d200fe02236b5d5216f74dfd7e00"} Feb 03 07:30:56 crc kubenswrapper[4708]: I0203 07:30:56.464939 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.464920691 podStartE2EDuration="2.464920691s" podCreationTimestamp="2026-02-03 07:30:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:30:56.431448542 +0000 UTC m=+1235.413395349" watchObservedRunningTime="2026-02-03 07:30:56.464920691 +0000 UTC m=+1235.446867498" Feb 03 07:30:56 crc kubenswrapper[4708]: I0203 07:30:56.478679 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.097398011 podStartE2EDuration="2.478656038s" podCreationTimestamp="2026-02-03 07:30:54 +0000 UTC" firstStartedPulling="2026-02-03 07:30:55.360152994 +0000 UTC m=+1234.342099801" lastFinishedPulling="2026-02-03 07:30:55.741411011 +0000 UTC m=+1234.723357828" observedRunningTime="2026-02-03 07:30:56.472601349 +0000 UTC m=+1235.454548156" watchObservedRunningTime="2026-02-03 07:30:56.478656038 +0000 UTC m=+1235.460602855" Feb 03 07:30:56 crc kubenswrapper[4708]: W0203 07:30:56.490586 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb0112ed3_3c81_4e21_ae47_89c473987dec.slice/crio-495b6efbab562a072b40537fc9ca693d28d426754ed59b98863bf21c77c60fa1 WatchSource:0}: Error finding container 495b6efbab562a072b40537fc9ca693d28d426754ed59b98863bf21c77c60fa1: Status 404 returned error can't find the container with id 495b6efbab562a072b40537fc9ca693d28d426754ed59b98863bf21c77c60fa1 Feb 03 07:30:56 crc kubenswrapper[4708]: I0203 07:30:56.508857 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-86mmh"] Feb 03 07:30:56 crc kubenswrapper[4708]: I0203 07:30:56.517868 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.517849938 podStartE2EDuration="2.517849938s" podCreationTimestamp="2026-02-03 07:30:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:30:56.493185393 +0000 UTC m=+1235.475132200" watchObservedRunningTime="2026-02-03 07:30:56.517849938 +0000 UTC m=+1235.499796745" Feb 03 07:30:57 crc kubenswrapper[4708]: I0203 07:30:57.444290 4708 generic.go:334] "Generic (PLEG): container finished" 
podID="b0112ed3-3c81-4e21-ae47-89c473987dec" containerID="66b73251522e50aedb1ebe38a9f055eacdc4f140af8261a31d92084c2ed5d243" exitCode=0 Feb 03 07:30:57 crc kubenswrapper[4708]: I0203 07:30:57.446421 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-86mmh" event={"ID":"b0112ed3-3c81-4e21-ae47-89c473987dec","Type":"ContainerDied","Data":"66b73251522e50aedb1ebe38a9f055eacdc4f140af8261a31d92084c2ed5d243"} Feb 03 07:30:57 crc kubenswrapper[4708]: I0203 07:30:57.446471 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-86mmh" event={"ID":"b0112ed3-3c81-4e21-ae47-89c473987dec","Type":"ContainerStarted","Data":"495b6efbab562a072b40537fc9ca693d28d426754ed59b98863bf21c77c60fa1"} Feb 03 07:30:58 crc kubenswrapper[4708]: I0203 07:30:58.355612 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 03 07:30:58 crc kubenswrapper[4708]: I0203 07:30:58.454022 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-86mmh" event={"ID":"b0112ed3-3c81-4e21-ae47-89c473987dec","Type":"ContainerStarted","Data":"0358e0092a1c31b8e603d7bf03ea0e5cebe263b484b0ee66faae2c4576532918"} Feb 03 07:30:58 crc kubenswrapper[4708]: I0203 07:30:58.454159 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="a461a3e7-34d0-428b-b511-adbe5443a526" containerName="nova-api-log" containerID="cri-o://ed566b066f27d80ad31128568961bd635c149283ae4563371bf1a166b2eb6054" gracePeriod=30 Feb 03 07:30:58 crc kubenswrapper[4708]: I0203 07:30:58.454271 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="a461a3e7-34d0-428b-b511-adbe5443a526" containerName="nova-api-api" containerID="cri-o://71054a3525da66cf3ca4c88a304504066d25873651819c8ddc4da025b4beec73" gracePeriod=30 Feb 03 07:30:58 crc kubenswrapper[4708]: I0203 07:30:58.482342 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-89c5cd4d5-86mmh" podStartSLOduration=3.482317241 podStartE2EDuration="3.482317241s" podCreationTimestamp="2026-02-03 07:30:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:30:58.472998542 +0000 UTC m=+1237.454945359" watchObservedRunningTime="2026-02-03 07:30:58.482317241 +0000 UTC m=+1237.464264048" Feb 03 07:30:59 crc kubenswrapper[4708]: I0203 07:30:59.464217 4708 generic.go:334] "Generic (PLEG): container finished" podID="a461a3e7-34d0-428b-b511-adbe5443a526" containerID="ed566b066f27d80ad31128568961bd635c149283ae4563371bf1a166b2eb6054" exitCode=143 Feb 03 07:30:59 crc kubenswrapper[4708]: I0203 07:30:59.465226 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a461a3e7-34d0-428b-b511-adbe5443a526","Type":"ContainerDied","Data":"ed566b066f27d80ad31128568961bd635c149283ae4563371bf1a166b2eb6054"} Feb 03 07:30:59 crc kubenswrapper[4708]: I0203 07:30:59.465253 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-89c5cd4d5-86mmh" Feb 03 07:30:59 crc kubenswrapper[4708]: I0203 07:30:59.865605 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:30:59 crc kubenswrapper[4708]: I0203 07:30:59.934883 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 03 07:30:59 
crc kubenswrapper[4708]: I0203 07:30:59.935024 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 03 07:31:00 crc kubenswrapper[4708]: I0203 07:31:00.477046 4708 generic.go:334] "Generic (PLEG): container finished" podID="19739ef7-2ac6-46b5-84fb-308b16671ecc" containerID="5d0e6f2364c854eb3c38730b61ca6f52a8591bd813bc300466789bf1b07a830d" exitCode=0 Feb 03 07:31:00 crc kubenswrapper[4708]: I0203 07:31:00.477106 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19739ef7-2ac6-46b5-84fb-308b16671ecc","Type":"ContainerDied","Data":"5d0e6f2364c854eb3c38730b61ca6f52a8591bd813bc300466789bf1b07a830d"} Feb 03 07:31:00 crc kubenswrapper[4708]: I0203 07:31:00.813321 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.008374 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19739ef7-2ac6-46b5-84fb-308b16671ecc-run-httpd\") pod \"19739ef7-2ac6-46b5-84fb-308b16671ecc\" (UID: \"19739ef7-2ac6-46b5-84fb-308b16671ecc\") " Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.008455 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19739ef7-2ac6-46b5-84fb-308b16671ecc-log-httpd\") pod \"19739ef7-2ac6-46b5-84fb-308b16671ecc\" (UID: \"19739ef7-2ac6-46b5-84fb-308b16671ecc\") " Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.008529 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19739ef7-2ac6-46b5-84fb-308b16671ecc-config-data\") pod \"19739ef7-2ac6-46b5-84fb-308b16671ecc\" (UID: \"19739ef7-2ac6-46b5-84fb-308b16671ecc\") " Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.008560 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19739ef7-2ac6-46b5-84fb-308b16671ecc-combined-ca-bundle\") pod \"19739ef7-2ac6-46b5-84fb-308b16671ecc\" (UID: \"19739ef7-2ac6-46b5-84fb-308b16671ecc\") " Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.008622 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tsf8v\" (UniqueName: \"kubernetes.io/projected/19739ef7-2ac6-46b5-84fb-308b16671ecc-kube-api-access-tsf8v\") pod \"19739ef7-2ac6-46b5-84fb-308b16671ecc\" (UID: \"19739ef7-2ac6-46b5-84fb-308b16671ecc\") " Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.008649 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/19739ef7-2ac6-46b5-84fb-308b16671ecc-sg-core-conf-yaml\") pod \"19739ef7-2ac6-46b5-84fb-308b16671ecc\" (UID: \"19739ef7-2ac6-46b5-84fb-308b16671ecc\") " Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.008671 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/19739ef7-2ac6-46b5-84fb-308b16671ecc-scripts\") pod \"19739ef7-2ac6-46b5-84fb-308b16671ecc\" (UID: \"19739ef7-2ac6-46b5-84fb-308b16671ecc\") " Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.009160 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19739ef7-2ac6-46b5-84fb-308b16671ecc-log-httpd" 
(OuterVolumeSpecName: "log-httpd") pod "19739ef7-2ac6-46b5-84fb-308b16671ecc" (UID: "19739ef7-2ac6-46b5-84fb-308b16671ecc"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.009568 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19739ef7-2ac6-46b5-84fb-308b16671ecc-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "19739ef7-2ac6-46b5-84fb-308b16671ecc" (UID: "19739ef7-2ac6-46b5-84fb-308b16671ecc"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.009941 4708 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19739ef7-2ac6-46b5-84fb-308b16671ecc-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.009973 4708 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19739ef7-2ac6-46b5-84fb-308b16671ecc-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.015987 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19739ef7-2ac6-46b5-84fb-308b16671ecc-scripts" (OuterVolumeSpecName: "scripts") pod "19739ef7-2ac6-46b5-84fb-308b16671ecc" (UID: "19739ef7-2ac6-46b5-84fb-308b16671ecc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.016085 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19739ef7-2ac6-46b5-84fb-308b16671ecc-kube-api-access-tsf8v" (OuterVolumeSpecName: "kube-api-access-tsf8v") pod "19739ef7-2ac6-46b5-84fb-308b16671ecc" (UID: "19739ef7-2ac6-46b5-84fb-308b16671ecc"). InnerVolumeSpecName "kube-api-access-tsf8v". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.045143 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19739ef7-2ac6-46b5-84fb-308b16671ecc-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "19739ef7-2ac6-46b5-84fb-308b16671ecc" (UID: "19739ef7-2ac6-46b5-84fb-308b16671ecc"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.094638 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19739ef7-2ac6-46b5-84fb-308b16671ecc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "19739ef7-2ac6-46b5-84fb-308b16671ecc" (UID: "19739ef7-2ac6-46b5-84fb-308b16671ecc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.112440 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19739ef7-2ac6-46b5-84fb-308b16671ecc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.112485 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tsf8v\" (UniqueName: \"kubernetes.io/projected/19739ef7-2ac6-46b5-84fb-308b16671ecc-kube-api-access-tsf8v\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.112507 4708 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/19739ef7-2ac6-46b5-84fb-308b16671ecc-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.112520 4708 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/19739ef7-2ac6-46b5-84fb-308b16671ecc-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.117596 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19739ef7-2ac6-46b5-84fb-308b16671ecc-config-data" (OuterVolumeSpecName: "config-data") pod "19739ef7-2ac6-46b5-84fb-308b16671ecc" (UID: "19739ef7-2ac6-46b5-84fb-308b16671ecc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.213858 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19739ef7-2ac6-46b5-84fb-308b16671ecc-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.489004 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19739ef7-2ac6-46b5-84fb-308b16671ecc","Type":"ContainerDied","Data":"ccd55ffec04468346af02e0e15f9904749dcde70ea92a14de2c29ae9433d0142"} Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.489053 4708 scope.go:117] "RemoveContainer" containerID="baef3af92ed5808a9fbdc2896f0b3d5eab08cec0ab5fa1e0ff40c3b105d40deb" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.489094 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.518960 4708 scope.go:117] "RemoveContainer" containerID="961b214c19b1a9b102099f7d89b8f43892b409daac635b8d6c905f95706420db" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.537612 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.544931 4708 scope.go:117] "RemoveContainer" containerID="5d0e6f2364c854eb3c38730b61ca6f52a8591bd813bc300466789bf1b07a830d" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.554913 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.574086 4708 scope.go:117] "RemoveContainer" containerID="a731d76dfd3e1491ecf9d2574ad19628f8b8d200fe02236b5d5216f74dfd7e00" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.576790 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:31:01 crc kubenswrapper[4708]: E0203 07:31:01.577362 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19739ef7-2ac6-46b5-84fb-308b16671ecc" containerName="ceilometer-central-agent" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.577375 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="19739ef7-2ac6-46b5-84fb-308b16671ecc" containerName="ceilometer-central-agent" Feb 03 07:31:01 crc kubenswrapper[4708]: E0203 07:31:01.577398 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19739ef7-2ac6-46b5-84fb-308b16671ecc" containerName="proxy-httpd" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.577404 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="19739ef7-2ac6-46b5-84fb-308b16671ecc" containerName="proxy-httpd" Feb 03 07:31:01 crc kubenswrapper[4708]: E0203 07:31:01.577421 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19739ef7-2ac6-46b5-84fb-308b16671ecc" containerName="ceilometer-notification-agent" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.577427 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="19739ef7-2ac6-46b5-84fb-308b16671ecc" containerName="ceilometer-notification-agent" Feb 03 07:31:01 crc kubenswrapper[4708]: E0203 07:31:01.577447 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19739ef7-2ac6-46b5-84fb-308b16671ecc" containerName="sg-core" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.577452 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="19739ef7-2ac6-46b5-84fb-308b16671ecc" containerName="sg-core" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.577635 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="19739ef7-2ac6-46b5-84fb-308b16671ecc" containerName="sg-core" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.577652 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="19739ef7-2ac6-46b5-84fb-308b16671ecc" containerName="proxy-httpd" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.577669 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="19739ef7-2ac6-46b5-84fb-308b16671ecc" containerName="ceilometer-central-agent" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.577678 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="19739ef7-2ac6-46b5-84fb-308b16671ecc" containerName="ceilometer-notification-agent" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.579881 4708 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.583256 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.584286 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.589413 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.599825 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.724319 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/11c88b16-1207-482e-af23-035b4b973d3b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"11c88b16-1207-482e-af23-035b4b973d3b\") " pod="openstack/ceilometer-0" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.724736 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/11c88b16-1207-482e-af23-035b4b973d3b-run-httpd\") pod \"ceilometer-0\" (UID: \"11c88b16-1207-482e-af23-035b4b973d3b\") " pod="openstack/ceilometer-0" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.724826 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/11c88b16-1207-482e-af23-035b4b973d3b-log-httpd\") pod \"ceilometer-0\" (UID: \"11c88b16-1207-482e-af23-035b4b973d3b\") " pod="openstack/ceilometer-0" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.724868 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11c88b16-1207-482e-af23-035b4b973d3b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"11c88b16-1207-482e-af23-035b4b973d3b\") " pod="openstack/ceilometer-0" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.724975 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzcnr\" (UniqueName: \"kubernetes.io/projected/11c88b16-1207-482e-af23-035b4b973d3b-kube-api-access-wzcnr\") pod \"ceilometer-0\" (UID: \"11c88b16-1207-482e-af23-035b4b973d3b\") " pod="openstack/ceilometer-0" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.725038 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11c88b16-1207-482e-af23-035b4b973d3b-scripts\") pod \"ceilometer-0\" (UID: \"11c88b16-1207-482e-af23-035b4b973d3b\") " pod="openstack/ceilometer-0" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.725189 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/11c88b16-1207-482e-af23-035b4b973d3b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"11c88b16-1207-482e-af23-035b4b973d3b\") " pod="openstack/ceilometer-0" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.725313 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/11c88b16-1207-482e-af23-035b4b973d3b-config-data\") pod \"ceilometer-0\" (UID: \"11c88b16-1207-482e-af23-035b4b973d3b\") " pod="openstack/ceilometer-0" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.827388 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/11c88b16-1207-482e-af23-035b4b973d3b-run-httpd\") pod \"ceilometer-0\" (UID: \"11c88b16-1207-482e-af23-035b4b973d3b\") " pod="openstack/ceilometer-0" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.827434 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11c88b16-1207-482e-af23-035b4b973d3b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"11c88b16-1207-482e-af23-035b4b973d3b\") " pod="openstack/ceilometer-0" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.827455 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/11c88b16-1207-482e-af23-035b4b973d3b-log-httpd\") pod \"ceilometer-0\" (UID: \"11c88b16-1207-482e-af23-035b4b973d3b\") " pod="openstack/ceilometer-0" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.828039 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/11c88b16-1207-482e-af23-035b4b973d3b-run-httpd\") pod \"ceilometer-0\" (UID: \"11c88b16-1207-482e-af23-035b4b973d3b\") " pod="openstack/ceilometer-0" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.828136 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzcnr\" (UniqueName: \"kubernetes.io/projected/11c88b16-1207-482e-af23-035b4b973d3b-kube-api-access-wzcnr\") pod \"ceilometer-0\" (UID: \"11c88b16-1207-482e-af23-035b4b973d3b\") " pod="openstack/ceilometer-0" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.828177 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11c88b16-1207-482e-af23-035b4b973d3b-scripts\") pod \"ceilometer-0\" (UID: \"11c88b16-1207-482e-af23-035b4b973d3b\") " pod="openstack/ceilometer-0" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.828179 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/11c88b16-1207-482e-af23-035b4b973d3b-log-httpd\") pod \"ceilometer-0\" (UID: \"11c88b16-1207-482e-af23-035b4b973d3b\") " pod="openstack/ceilometer-0" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.828326 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/11c88b16-1207-482e-af23-035b4b973d3b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"11c88b16-1207-482e-af23-035b4b973d3b\") " pod="openstack/ceilometer-0" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.828477 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11c88b16-1207-482e-af23-035b4b973d3b-config-data\") pod \"ceilometer-0\" (UID: \"11c88b16-1207-482e-af23-035b4b973d3b\") " pod="openstack/ceilometer-0" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.828554 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/11c88b16-1207-482e-af23-035b4b973d3b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"11c88b16-1207-482e-af23-035b4b973d3b\") " pod="openstack/ceilometer-0" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.832882 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11c88b16-1207-482e-af23-035b4b973d3b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"11c88b16-1207-482e-af23-035b4b973d3b\") " pod="openstack/ceilometer-0" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.833465 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11c88b16-1207-482e-af23-035b4b973d3b-scripts\") pod \"ceilometer-0\" (UID: \"11c88b16-1207-482e-af23-035b4b973d3b\") " pod="openstack/ceilometer-0" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.833846 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/11c88b16-1207-482e-af23-035b4b973d3b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"11c88b16-1207-482e-af23-035b4b973d3b\") " pod="openstack/ceilometer-0" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.834712 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/11c88b16-1207-482e-af23-035b4b973d3b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"11c88b16-1207-482e-af23-035b4b973d3b\") " pod="openstack/ceilometer-0" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.835615 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11c88b16-1207-482e-af23-035b4b973d3b-config-data\") pod \"ceilometer-0\" (UID: \"11c88b16-1207-482e-af23-035b4b973d3b\") " pod="openstack/ceilometer-0" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.895742 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzcnr\" (UniqueName: \"kubernetes.io/projected/11c88b16-1207-482e-af23-035b4b973d3b-kube-api-access-wzcnr\") pod \"ceilometer-0\" (UID: \"11c88b16-1207-482e-af23-035b4b973d3b\") " pod="openstack/ceilometer-0" Feb 03 07:31:01 crc kubenswrapper[4708]: I0203 07:31:01.906010 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.039545 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.113225 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19739ef7-2ac6-46b5-84fb-308b16671ecc" path="/var/lib/kubelet/pods/19739ef7-2ac6-46b5-84fb-308b16671ecc/volumes" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.235292 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-skcbl\" (UniqueName: \"kubernetes.io/projected/a461a3e7-34d0-428b-b511-adbe5443a526-kube-api-access-skcbl\") pod \"a461a3e7-34d0-428b-b511-adbe5443a526\" (UID: \"a461a3e7-34d0-428b-b511-adbe5443a526\") " Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.235379 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a461a3e7-34d0-428b-b511-adbe5443a526-config-data\") pod \"a461a3e7-34d0-428b-b511-adbe5443a526\" (UID: \"a461a3e7-34d0-428b-b511-adbe5443a526\") " Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.235422 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a461a3e7-34d0-428b-b511-adbe5443a526-logs\") pod \"a461a3e7-34d0-428b-b511-adbe5443a526\" (UID: \"a461a3e7-34d0-428b-b511-adbe5443a526\") " Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.235485 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a461a3e7-34d0-428b-b511-adbe5443a526-combined-ca-bundle\") pod \"a461a3e7-34d0-428b-b511-adbe5443a526\" (UID: \"a461a3e7-34d0-428b-b511-adbe5443a526\") " Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.236629 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a461a3e7-34d0-428b-b511-adbe5443a526-logs" (OuterVolumeSpecName: "logs") pod "a461a3e7-34d0-428b-b511-adbe5443a526" (UID: "a461a3e7-34d0-428b-b511-adbe5443a526"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.240960 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a461a3e7-34d0-428b-b511-adbe5443a526-kube-api-access-skcbl" (OuterVolumeSpecName: "kube-api-access-skcbl") pod "a461a3e7-34d0-428b-b511-adbe5443a526" (UID: "a461a3e7-34d0-428b-b511-adbe5443a526"). InnerVolumeSpecName "kube-api-access-skcbl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.268091 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a461a3e7-34d0-428b-b511-adbe5443a526-config-data" (OuterVolumeSpecName: "config-data") pod "a461a3e7-34d0-428b-b511-adbe5443a526" (UID: "a461a3e7-34d0-428b-b511-adbe5443a526"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.273860 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a461a3e7-34d0-428b-b511-adbe5443a526-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a461a3e7-34d0-428b-b511-adbe5443a526" (UID: "a461a3e7-34d0-428b-b511-adbe5443a526"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.338141 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a461a3e7-34d0-428b-b511-adbe5443a526-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.338187 4708 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a461a3e7-34d0-428b-b511-adbe5443a526-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.338196 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a461a3e7-34d0-428b-b511-adbe5443a526-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.338206 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-skcbl\" (UniqueName: \"kubernetes.io/projected/a461a3e7-34d0-428b-b511-adbe5443a526-kube-api-access-skcbl\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.378971 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.497713 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"11c88b16-1207-482e-af23-035b4b973d3b","Type":"ContainerStarted","Data":"f7bf5ff569843a44d063dcee44a61c174817350099ae945bd0adb1b2b9cc7530"} Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.499663 4708 generic.go:334] "Generic (PLEG): container finished" podID="a461a3e7-34d0-428b-b511-adbe5443a526" containerID="71054a3525da66cf3ca4c88a304504066d25873651819c8ddc4da025b4beec73" exitCode=0 Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.499835 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a461a3e7-34d0-428b-b511-adbe5443a526","Type":"ContainerDied","Data":"71054a3525da66cf3ca4c88a304504066d25873651819c8ddc4da025b4beec73"} Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.499898 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.500046 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a461a3e7-34d0-428b-b511-adbe5443a526","Type":"ContainerDied","Data":"254fa7296522ef3c40b0631321202b5c28d7ed6e7fc1b9b3a7b0215b8d788b1b"} Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.500070 4708 scope.go:117] "RemoveContainer" containerID="71054a3525da66cf3ca4c88a304504066d25873651819c8ddc4da025b4beec73" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.538482 4708 scope.go:117] "RemoveContainer" containerID="ed566b066f27d80ad31128568961bd635c149283ae4563371bf1a166b2eb6054" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.569283 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.579439 4708 scope.go:117] "RemoveContainer" containerID="71054a3525da66cf3ca4c88a304504066d25873651819c8ddc4da025b4beec73" Feb 03 07:31:02 crc kubenswrapper[4708]: E0203 07:31:02.580970 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71054a3525da66cf3ca4c88a304504066d25873651819c8ddc4da025b4beec73\": container with ID starting with 71054a3525da66cf3ca4c88a304504066d25873651819c8ddc4da025b4beec73 not found: ID does not exist" containerID="71054a3525da66cf3ca4c88a304504066d25873651819c8ddc4da025b4beec73" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.581026 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71054a3525da66cf3ca4c88a304504066d25873651819c8ddc4da025b4beec73"} err="failed to get container status \"71054a3525da66cf3ca4c88a304504066d25873651819c8ddc4da025b4beec73\": rpc error: code = NotFound desc = could not find container \"71054a3525da66cf3ca4c88a304504066d25873651819c8ddc4da025b4beec73\": container with ID starting with 71054a3525da66cf3ca4c88a304504066d25873651819c8ddc4da025b4beec73 not found: ID does not exist" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.581053 4708 scope.go:117] "RemoveContainer" containerID="ed566b066f27d80ad31128568961bd635c149283ae4563371bf1a166b2eb6054" Feb 03 07:31:02 crc kubenswrapper[4708]: E0203 07:31:02.581415 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed566b066f27d80ad31128568961bd635c149283ae4563371bf1a166b2eb6054\": container with ID starting with ed566b066f27d80ad31128568961bd635c149283ae4563371bf1a166b2eb6054 not found: ID does not exist" containerID="ed566b066f27d80ad31128568961bd635c149283ae4563371bf1a166b2eb6054" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.581444 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed566b066f27d80ad31128568961bd635c149283ae4563371bf1a166b2eb6054"} err="failed to get container status \"ed566b066f27d80ad31128568961bd635c149283ae4563371bf1a166b2eb6054\": rpc error: code = NotFound desc = could not find container \"ed566b066f27d80ad31128568961bd635c149283ae4563371bf1a166b2eb6054\": container with ID starting with ed566b066f27d80ad31128568961bd635c149283ae4563371bf1a166b2eb6054 not found: ID does not exist" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.611457 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.619085 4708 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 03 07:31:02 crc kubenswrapper[4708]: E0203 07:31:02.619542 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a461a3e7-34d0-428b-b511-adbe5443a526" containerName="nova-api-log" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.619566 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="a461a3e7-34d0-428b-b511-adbe5443a526" containerName="nova-api-log" Feb 03 07:31:02 crc kubenswrapper[4708]: E0203 07:31:02.619577 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a461a3e7-34d0-428b-b511-adbe5443a526" containerName="nova-api-api" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.619583 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="a461a3e7-34d0-428b-b511-adbe5443a526" containerName="nova-api-api" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.619757 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="a461a3e7-34d0-428b-b511-adbe5443a526" containerName="nova-api-log" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.619777 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="a461a3e7-34d0-428b-b511-adbe5443a526" containerName="nova-api-api" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.620807 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.623440 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.623589 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.623691 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.627527 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.647002 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ce4b430-d4a5-4a52-936a-47bce054b824-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6ce4b430-d4a5-4a52-936a-47bce054b824\") " pod="openstack/nova-api-0" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.647091 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwv46\" (UniqueName: \"kubernetes.io/projected/6ce4b430-d4a5-4a52-936a-47bce054b824-kube-api-access-rwv46\") pod \"nova-api-0\" (UID: \"6ce4b430-d4a5-4a52-936a-47bce054b824\") " pod="openstack/nova-api-0" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.647117 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ce4b430-d4a5-4a52-936a-47bce054b824-public-tls-certs\") pod \"nova-api-0\" (UID: \"6ce4b430-d4a5-4a52-936a-47bce054b824\") " pod="openstack/nova-api-0" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.647162 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ce4b430-d4a5-4a52-936a-47bce054b824-internal-tls-certs\") pod \"nova-api-0\" (UID: \"6ce4b430-d4a5-4a52-936a-47bce054b824\") " 
pod="openstack/nova-api-0" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.647239 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6ce4b430-d4a5-4a52-936a-47bce054b824-logs\") pod \"nova-api-0\" (UID: \"6ce4b430-d4a5-4a52-936a-47bce054b824\") " pod="openstack/nova-api-0" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.647278 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ce4b430-d4a5-4a52-936a-47bce054b824-config-data\") pod \"nova-api-0\" (UID: \"6ce4b430-d4a5-4a52-936a-47bce054b824\") " pod="openstack/nova-api-0" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.748867 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ce4b430-d4a5-4a52-936a-47bce054b824-internal-tls-certs\") pod \"nova-api-0\" (UID: \"6ce4b430-d4a5-4a52-936a-47bce054b824\") " pod="openstack/nova-api-0" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.749017 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6ce4b430-d4a5-4a52-936a-47bce054b824-logs\") pod \"nova-api-0\" (UID: \"6ce4b430-d4a5-4a52-936a-47bce054b824\") " pod="openstack/nova-api-0" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.749063 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ce4b430-d4a5-4a52-936a-47bce054b824-config-data\") pod \"nova-api-0\" (UID: \"6ce4b430-d4a5-4a52-936a-47bce054b824\") " pod="openstack/nova-api-0" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.749136 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ce4b430-d4a5-4a52-936a-47bce054b824-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6ce4b430-d4a5-4a52-936a-47bce054b824\") " pod="openstack/nova-api-0" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.749198 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwv46\" (UniqueName: \"kubernetes.io/projected/6ce4b430-d4a5-4a52-936a-47bce054b824-kube-api-access-rwv46\") pod \"nova-api-0\" (UID: \"6ce4b430-d4a5-4a52-936a-47bce054b824\") " pod="openstack/nova-api-0" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.749222 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ce4b430-d4a5-4a52-936a-47bce054b824-public-tls-certs\") pod \"nova-api-0\" (UID: \"6ce4b430-d4a5-4a52-936a-47bce054b824\") " pod="openstack/nova-api-0" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.750922 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6ce4b430-d4a5-4a52-936a-47bce054b824-logs\") pod \"nova-api-0\" (UID: \"6ce4b430-d4a5-4a52-936a-47bce054b824\") " pod="openstack/nova-api-0" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.753428 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ce4b430-d4a5-4a52-936a-47bce054b824-public-tls-certs\") pod \"nova-api-0\" (UID: \"6ce4b430-d4a5-4a52-936a-47bce054b824\") " pod="openstack/nova-api-0" Feb 03 07:31:02 crc 
kubenswrapper[4708]: I0203 07:31:02.754279 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ce4b430-d4a5-4a52-936a-47bce054b824-internal-tls-certs\") pod \"nova-api-0\" (UID: \"6ce4b430-d4a5-4a52-936a-47bce054b824\") " pod="openstack/nova-api-0" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.756665 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ce4b430-d4a5-4a52-936a-47bce054b824-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6ce4b430-d4a5-4a52-936a-47bce054b824\") " pod="openstack/nova-api-0" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.760574 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ce4b430-d4a5-4a52-936a-47bce054b824-config-data\") pod \"nova-api-0\" (UID: \"6ce4b430-d4a5-4a52-936a-47bce054b824\") " pod="openstack/nova-api-0" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.781128 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwv46\" (UniqueName: \"kubernetes.io/projected/6ce4b430-d4a5-4a52-936a-47bce054b824-kube-api-access-rwv46\") pod \"nova-api-0\" (UID: \"6ce4b430-d4a5-4a52-936a-47bce054b824\") " pod="openstack/nova-api-0" Feb 03 07:31:02 crc kubenswrapper[4708]: I0203 07:31:02.935950 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 03 07:31:03 crc kubenswrapper[4708]: I0203 07:31:03.426876 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 03 07:31:03 crc kubenswrapper[4708]: I0203 07:31:03.513751 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"11c88b16-1207-482e-af23-035b4b973d3b","Type":"ContainerStarted","Data":"c30fd5102476c080d51e964721caa12e974e5e8c6487a07465b501fcbe033de8"} Feb 03 07:31:03 crc kubenswrapper[4708]: I0203 07:31:03.516991 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6ce4b430-d4a5-4a52-936a-47bce054b824","Type":"ContainerStarted","Data":"23aa1ff04578b47322b5f9eaa53c18a2d383d118d0d34fca21207c9de4d54ee0"} Feb 03 07:31:04 crc kubenswrapper[4708]: I0203 07:31:04.109936 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a461a3e7-34d0-428b-b511-adbe5443a526" path="/var/lib/kubelet/pods/a461a3e7-34d0-428b-b511-adbe5443a526/volumes" Feb 03 07:31:04 crc kubenswrapper[4708]: I0203 07:31:04.532591 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"11c88b16-1207-482e-af23-035b4b973d3b","Type":"ContainerStarted","Data":"d19e47262e59720aeab064f8d06f409326e8323dcc8ca83d436c5b495c948483"} Feb 03 07:31:04 crc kubenswrapper[4708]: I0203 07:31:04.536110 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6ce4b430-d4a5-4a52-936a-47bce054b824","Type":"ContainerStarted","Data":"c06770ab48c3f6fd8d8edb6267daffdf921570bdb6e95c90e0efb01249f333cb"} Feb 03 07:31:04 crc kubenswrapper[4708]: I0203 07:31:04.536134 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6ce4b430-d4a5-4a52-936a-47bce054b824","Type":"ContainerStarted","Data":"07b78cb51b2c6032ad1770564fe335b7cf8cd106851967c4ee56877604b40167"} Feb 03 07:31:04 crc kubenswrapper[4708]: I0203 07:31:04.561467 4708 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/nova-api-0" podStartSLOduration=2.561446377 podStartE2EDuration="2.561446377s" podCreationTimestamp="2026-02-03 07:31:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:31:04.553841551 +0000 UTC m=+1243.535788358" watchObservedRunningTime="2026-02-03 07:31:04.561446377 +0000 UTC m=+1243.543393184" Feb 03 07:31:04 crc kubenswrapper[4708]: I0203 07:31:04.865382 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:31:04 crc kubenswrapper[4708]: I0203 07:31:04.884941 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Feb 03 07:31:04 crc kubenswrapper[4708]: I0203 07:31:04.885055 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:31:04 crc kubenswrapper[4708]: I0203 07:31:04.933942 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 03 07:31:04 crc kubenswrapper[4708]: I0203 07:31:04.933983 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 03 07:31:05 crc kubenswrapper[4708]: I0203 07:31:05.563135 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"11c88b16-1207-482e-af23-035b4b973d3b","Type":"ContainerStarted","Data":"a7c9738ff00c66fbed3ebd18e8f79102cbb26ca8ad224057fbe2140d46b4db06"} Feb 03 07:31:05 crc kubenswrapper[4708]: I0203 07:31:05.600711 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Feb 03 07:31:05 crc kubenswrapper[4708]: I0203 07:31:05.843275 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-57zpw"] Feb 03 07:31:05 crc kubenswrapper[4708]: I0203 07:31:05.844559 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-57zpw" Feb 03 07:31:05 crc kubenswrapper[4708]: I0203 07:31:05.847073 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Feb 03 07:31:05 crc kubenswrapper[4708]: I0203 07:31:05.847441 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Feb 03 07:31:05 crc kubenswrapper[4708]: I0203 07:31:05.862159 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-57zpw"] Feb 03 07:31:05 crc kubenswrapper[4708]: I0203 07:31:05.917955 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e55db381-d6ba-4d52-981a-918aacdedc97-config-data\") pod \"nova-cell1-cell-mapping-57zpw\" (UID: \"e55db381-d6ba-4d52-981a-918aacdedc97\") " pod="openstack/nova-cell1-cell-mapping-57zpw" Feb 03 07:31:05 crc kubenswrapper[4708]: I0203 07:31:05.918102 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q5wht\" (UniqueName: \"kubernetes.io/projected/e55db381-d6ba-4d52-981a-918aacdedc97-kube-api-access-q5wht\") pod \"nova-cell1-cell-mapping-57zpw\" (UID: \"e55db381-d6ba-4d52-981a-918aacdedc97\") " pod="openstack/nova-cell1-cell-mapping-57zpw" Feb 03 07:31:05 crc kubenswrapper[4708]: I0203 07:31:05.918181 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e55db381-d6ba-4d52-981a-918aacdedc97-scripts\") pod \"nova-cell1-cell-mapping-57zpw\" (UID: \"e55db381-d6ba-4d52-981a-918aacdedc97\") " pod="openstack/nova-cell1-cell-mapping-57zpw" Feb 03 07:31:05 crc kubenswrapper[4708]: I0203 07:31:05.918523 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e55db381-d6ba-4d52-981a-918aacdedc97-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-57zpw\" (UID: \"e55db381-d6ba-4d52-981a-918aacdedc97\") " pod="openstack/nova-cell1-cell-mapping-57zpw" Feb 03 07:31:05 crc kubenswrapper[4708]: I0203 07:31:05.945013 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-89c5cd4d5-86mmh" Feb 03 07:31:05 crc kubenswrapper[4708]: I0203 07:31:05.946947 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="3ccf3d30-d7b0-48cf-946f-4ba5154fefdf" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.206:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 03 07:31:05 crc kubenswrapper[4708]: I0203 07:31:05.946949 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="3ccf3d30-d7b0-48cf-946f-4ba5154fefdf" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.206:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.009250 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-rzctf"] Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.009512 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-757b4f8459-rzctf" podUID="e7bbff83-97e5-41e1-8445-0c7deba26930" 
containerName="dnsmasq-dns" containerID="cri-o://22a95e27b5e5bb614fa197c59d103cc7466461cd9d655a7bacccb36e5cff9e89" gracePeriod=10 Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.021533 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e55db381-d6ba-4d52-981a-918aacdedc97-config-data\") pod \"nova-cell1-cell-mapping-57zpw\" (UID: \"e55db381-d6ba-4d52-981a-918aacdedc97\") " pod="openstack/nova-cell1-cell-mapping-57zpw" Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.021632 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q5wht\" (UniqueName: \"kubernetes.io/projected/e55db381-d6ba-4d52-981a-918aacdedc97-kube-api-access-q5wht\") pod \"nova-cell1-cell-mapping-57zpw\" (UID: \"e55db381-d6ba-4d52-981a-918aacdedc97\") " pod="openstack/nova-cell1-cell-mapping-57zpw" Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.024056 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e55db381-d6ba-4d52-981a-918aacdedc97-scripts\") pod \"nova-cell1-cell-mapping-57zpw\" (UID: \"e55db381-d6ba-4d52-981a-918aacdedc97\") " pod="openstack/nova-cell1-cell-mapping-57zpw" Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.024357 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e55db381-d6ba-4d52-981a-918aacdedc97-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-57zpw\" (UID: \"e55db381-d6ba-4d52-981a-918aacdedc97\") " pod="openstack/nova-cell1-cell-mapping-57zpw" Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.030260 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e55db381-d6ba-4d52-981a-918aacdedc97-scripts\") pod \"nova-cell1-cell-mapping-57zpw\" (UID: \"e55db381-d6ba-4d52-981a-918aacdedc97\") " pod="openstack/nova-cell1-cell-mapping-57zpw" Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.033465 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e55db381-d6ba-4d52-981a-918aacdedc97-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-57zpw\" (UID: \"e55db381-d6ba-4d52-981a-918aacdedc97\") " pod="openstack/nova-cell1-cell-mapping-57zpw" Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.035262 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e55db381-d6ba-4d52-981a-918aacdedc97-config-data\") pod \"nova-cell1-cell-mapping-57zpw\" (UID: \"e55db381-d6ba-4d52-981a-918aacdedc97\") " pod="openstack/nova-cell1-cell-mapping-57zpw" Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.074516 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q5wht\" (UniqueName: \"kubernetes.io/projected/e55db381-d6ba-4d52-981a-918aacdedc97-kube-api-access-q5wht\") pod \"nova-cell1-cell-mapping-57zpw\" (UID: \"e55db381-d6ba-4d52-981a-918aacdedc97\") " pod="openstack/nova-cell1-cell-mapping-57zpw" Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.169109 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-57zpw" Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.494306 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-rzctf" Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.540881 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e7bbff83-97e5-41e1-8445-0c7deba26930-ovsdbserver-sb\") pod \"e7bbff83-97e5-41e1-8445-0c7deba26930\" (UID: \"e7bbff83-97e5-41e1-8445-0c7deba26930\") " Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.541029 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e7bbff83-97e5-41e1-8445-0c7deba26930-ovsdbserver-nb\") pod \"e7bbff83-97e5-41e1-8445-0c7deba26930\" (UID: \"e7bbff83-97e5-41e1-8445-0c7deba26930\") " Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.541067 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e7bbff83-97e5-41e1-8445-0c7deba26930-dns-swift-storage-0\") pod \"e7bbff83-97e5-41e1-8445-0c7deba26930\" (UID: \"e7bbff83-97e5-41e1-8445-0c7deba26930\") " Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.541091 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e7bbff83-97e5-41e1-8445-0c7deba26930-dns-svc\") pod \"e7bbff83-97e5-41e1-8445-0c7deba26930\" (UID: \"e7bbff83-97e5-41e1-8445-0c7deba26930\") " Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.541149 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7bbff83-97e5-41e1-8445-0c7deba26930-config\") pod \"e7bbff83-97e5-41e1-8445-0c7deba26930\" (UID: \"e7bbff83-97e5-41e1-8445-0c7deba26930\") " Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.541184 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkxg2\" (UniqueName: \"kubernetes.io/projected/e7bbff83-97e5-41e1-8445-0c7deba26930-kube-api-access-zkxg2\") pod \"e7bbff83-97e5-41e1-8445-0c7deba26930\" (UID: \"e7bbff83-97e5-41e1-8445-0c7deba26930\") " Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.566119 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7bbff83-97e5-41e1-8445-0c7deba26930-kube-api-access-zkxg2" (OuterVolumeSpecName: "kube-api-access-zkxg2") pod "e7bbff83-97e5-41e1-8445-0c7deba26930" (UID: "e7bbff83-97e5-41e1-8445-0c7deba26930"). InnerVolumeSpecName "kube-api-access-zkxg2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.585993 4708 generic.go:334] "Generic (PLEG): container finished" podID="e7bbff83-97e5-41e1-8445-0c7deba26930" containerID="22a95e27b5e5bb614fa197c59d103cc7466461cd9d655a7bacccb36e5cff9e89" exitCode=0 Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.589573 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-rzctf" Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.590379 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-rzctf" event={"ID":"e7bbff83-97e5-41e1-8445-0c7deba26930","Type":"ContainerDied","Data":"22a95e27b5e5bb614fa197c59d103cc7466461cd9d655a7bacccb36e5cff9e89"} Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.590414 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-rzctf" event={"ID":"e7bbff83-97e5-41e1-8445-0c7deba26930","Type":"ContainerDied","Data":"9f17c28e9e2749c7f93f995de47274949d70a5447a62578d655124ffbcbd3fcf"} Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.590431 4708 scope.go:117] "RemoveContainer" containerID="22a95e27b5e5bb614fa197c59d103cc7466461cd9d655a7bacccb36e5cff9e89" Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.634809 4708 scope.go:117] "RemoveContainer" containerID="a65b58de2d691eab1ed582c8e2cd81187a4496310d82c0d8cfec7bd83b364503" Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.643729 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkxg2\" (UniqueName: \"kubernetes.io/projected/e7bbff83-97e5-41e1-8445-0c7deba26930-kube-api-access-zkxg2\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.718376 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-57zpw"] Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.745305 4708 scope.go:117] "RemoveContainer" containerID="22a95e27b5e5bb614fa197c59d103cc7466461cd9d655a7bacccb36e5cff9e89" Feb 03 07:31:06 crc kubenswrapper[4708]: E0203 07:31:06.746258 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22a95e27b5e5bb614fa197c59d103cc7466461cd9d655a7bacccb36e5cff9e89\": container with ID starting with 22a95e27b5e5bb614fa197c59d103cc7466461cd9d655a7bacccb36e5cff9e89 not found: ID does not exist" containerID="22a95e27b5e5bb614fa197c59d103cc7466461cd9d655a7bacccb36e5cff9e89" Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.746290 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22a95e27b5e5bb614fa197c59d103cc7466461cd9d655a7bacccb36e5cff9e89"} err="failed to get container status \"22a95e27b5e5bb614fa197c59d103cc7466461cd9d655a7bacccb36e5cff9e89\": rpc error: code = NotFound desc = could not find container \"22a95e27b5e5bb614fa197c59d103cc7466461cd9d655a7bacccb36e5cff9e89\": container with ID starting with 22a95e27b5e5bb614fa197c59d103cc7466461cd9d655a7bacccb36e5cff9e89 not found: ID does not exist" Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.746309 4708 scope.go:117] "RemoveContainer" containerID="a65b58de2d691eab1ed582c8e2cd81187a4496310d82c0d8cfec7bd83b364503" Feb 03 07:31:06 crc kubenswrapper[4708]: E0203 07:31:06.746605 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a65b58de2d691eab1ed582c8e2cd81187a4496310d82c0d8cfec7bd83b364503\": container with ID starting with a65b58de2d691eab1ed582c8e2cd81187a4496310d82c0d8cfec7bd83b364503 not found: ID does not exist" containerID="a65b58de2d691eab1ed582c8e2cd81187a4496310d82c0d8cfec7bd83b364503" Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.746669 4708 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"a65b58de2d691eab1ed582c8e2cd81187a4496310d82c0d8cfec7bd83b364503"} err="failed to get container status \"a65b58de2d691eab1ed582c8e2cd81187a4496310d82c0d8cfec7bd83b364503\": rpc error: code = NotFound desc = could not find container \"a65b58de2d691eab1ed582c8e2cd81187a4496310d82c0d8cfec7bd83b364503\": container with ID starting with a65b58de2d691eab1ed582c8e2cd81187a4496310d82c0d8cfec7bd83b364503 not found: ID does not exist" Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.779185 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7bbff83-97e5-41e1-8445-0c7deba26930-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "e7bbff83-97e5-41e1-8445-0c7deba26930" (UID: "e7bbff83-97e5-41e1-8445-0c7deba26930"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.781366 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7bbff83-97e5-41e1-8445-0c7deba26930-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e7bbff83-97e5-41e1-8445-0c7deba26930" (UID: "e7bbff83-97e5-41e1-8445-0c7deba26930"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.800184 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7bbff83-97e5-41e1-8445-0c7deba26930-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e7bbff83-97e5-41e1-8445-0c7deba26930" (UID: "e7bbff83-97e5-41e1-8445-0c7deba26930"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.807330 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7bbff83-97e5-41e1-8445-0c7deba26930-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e7bbff83-97e5-41e1-8445-0c7deba26930" (UID: "e7bbff83-97e5-41e1-8445-0c7deba26930"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.821971 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7bbff83-97e5-41e1-8445-0c7deba26930-config" (OuterVolumeSpecName: "config") pod "e7bbff83-97e5-41e1-8445-0c7deba26930" (UID: "e7bbff83-97e5-41e1-8445-0c7deba26930"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.847322 4708 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e7bbff83-97e5-41e1-8445-0c7deba26930-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.847362 4708 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e7bbff83-97e5-41e1-8445-0c7deba26930-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.847377 4708 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e7bbff83-97e5-41e1-8445-0c7deba26930-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.847388 4708 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7bbff83-97e5-41e1-8445-0c7deba26930-config\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.847399 4708 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e7bbff83-97e5-41e1-8445-0c7deba26930-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.921848 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-rzctf"] Feb 03 07:31:06 crc kubenswrapper[4708]: I0203 07:31:06.935122 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-rzctf"] Feb 03 07:31:07 crc kubenswrapper[4708]: I0203 07:31:07.612597 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"11c88b16-1207-482e-af23-035b4b973d3b","Type":"ContainerStarted","Data":"af1b3c4f1f863daaa9537a8bd1a04523eef71bd25635dc9a18daa04c7c78e850"} Feb 03 07:31:07 crc kubenswrapper[4708]: I0203 07:31:07.614296 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 03 07:31:07 crc kubenswrapper[4708]: I0203 07:31:07.617100 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-57zpw" event={"ID":"e55db381-d6ba-4d52-981a-918aacdedc97","Type":"ContainerStarted","Data":"b631ce6336dd3b267f6a03ac6dbed496ef56579e7337329f48407162177f45e4"} Feb 03 07:31:07 crc kubenswrapper[4708]: I0203 07:31:07.617133 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-57zpw" event={"ID":"e55db381-d6ba-4d52-981a-918aacdedc97","Type":"ContainerStarted","Data":"77eeb36c4a8ad5812ed5fd7ee7b648bba3fb197b773b773dbc23756ff35483c9"} Feb 03 07:31:07 crc kubenswrapper[4708]: I0203 07:31:07.632169 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.668691832 podStartE2EDuration="6.632153953s" podCreationTimestamp="2026-02-03 07:31:01 +0000 UTC" firstStartedPulling="2026-02-03 07:31:02.381427905 +0000 UTC m=+1241.363374712" lastFinishedPulling="2026-02-03 07:31:06.344890036 +0000 UTC m=+1245.326836833" observedRunningTime="2026-02-03 07:31:07.631769113 +0000 UTC m=+1246.613715930" watchObservedRunningTime="2026-02-03 07:31:07.632153953 +0000 UTC m=+1246.614100760" Feb 03 07:31:07 crc kubenswrapper[4708]: I0203 07:31:07.659695 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/nova-cell1-cell-mapping-57zpw" podStartSLOduration=2.6596734570000002 podStartE2EDuration="2.659673457s" podCreationTimestamp="2026-02-03 07:31:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:31:07.648106754 +0000 UTC m=+1246.630053561" watchObservedRunningTime="2026-02-03 07:31:07.659673457 +0000 UTC m=+1246.641620274" Feb 03 07:31:08 crc kubenswrapper[4708]: I0203 07:31:08.117566 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7bbff83-97e5-41e1-8445-0c7deba26930" path="/var/lib/kubelet/pods/e7bbff83-97e5-41e1-8445-0c7deba26930/volumes" Feb 03 07:31:11 crc kubenswrapper[4708]: I0203 07:31:11.650328 4708 generic.go:334] "Generic (PLEG): container finished" podID="e55db381-d6ba-4d52-981a-918aacdedc97" containerID="b631ce6336dd3b267f6a03ac6dbed496ef56579e7337329f48407162177f45e4" exitCode=0 Feb 03 07:31:11 crc kubenswrapper[4708]: I0203 07:31:11.650358 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-57zpw" event={"ID":"e55db381-d6ba-4d52-981a-918aacdedc97","Type":"ContainerDied","Data":"b631ce6336dd3b267f6a03ac6dbed496ef56579e7337329f48407162177f45e4"} Feb 03 07:31:12 crc kubenswrapper[4708]: I0203 07:31:12.937215 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 03 07:31:12 crc kubenswrapper[4708]: I0203 07:31:12.938416 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 03 07:31:13 crc kubenswrapper[4708]: I0203 07:31:13.079181 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-57zpw" Feb 03 07:31:13 crc kubenswrapper[4708]: I0203 07:31:13.172768 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e55db381-d6ba-4d52-981a-918aacdedc97-config-data\") pod \"e55db381-d6ba-4d52-981a-918aacdedc97\" (UID: \"e55db381-d6ba-4d52-981a-918aacdedc97\") " Feb 03 07:31:13 crc kubenswrapper[4708]: I0203 07:31:13.172941 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q5wht\" (UniqueName: \"kubernetes.io/projected/e55db381-d6ba-4d52-981a-918aacdedc97-kube-api-access-q5wht\") pod \"e55db381-d6ba-4d52-981a-918aacdedc97\" (UID: \"e55db381-d6ba-4d52-981a-918aacdedc97\") " Feb 03 07:31:13 crc kubenswrapper[4708]: I0203 07:31:13.172998 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e55db381-d6ba-4d52-981a-918aacdedc97-scripts\") pod \"e55db381-d6ba-4d52-981a-918aacdedc97\" (UID: \"e55db381-d6ba-4d52-981a-918aacdedc97\") " Feb 03 07:31:13 crc kubenswrapper[4708]: I0203 07:31:13.173023 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e55db381-d6ba-4d52-981a-918aacdedc97-combined-ca-bundle\") pod \"e55db381-d6ba-4d52-981a-918aacdedc97\" (UID: \"e55db381-d6ba-4d52-981a-918aacdedc97\") " Feb 03 07:31:13 crc kubenswrapper[4708]: I0203 07:31:13.178307 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e55db381-d6ba-4d52-981a-918aacdedc97-kube-api-access-q5wht" (OuterVolumeSpecName: "kube-api-access-q5wht") pod "e55db381-d6ba-4d52-981a-918aacdedc97" (UID: "e55db381-d6ba-4d52-981a-918aacdedc97"). 
InnerVolumeSpecName "kube-api-access-q5wht". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:31:13 crc kubenswrapper[4708]: I0203 07:31:13.178934 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e55db381-d6ba-4d52-981a-918aacdedc97-scripts" (OuterVolumeSpecName: "scripts") pod "e55db381-d6ba-4d52-981a-918aacdedc97" (UID: "e55db381-d6ba-4d52-981a-918aacdedc97"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:31:13 crc kubenswrapper[4708]: I0203 07:31:13.200716 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e55db381-d6ba-4d52-981a-918aacdedc97-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e55db381-d6ba-4d52-981a-918aacdedc97" (UID: "e55db381-d6ba-4d52-981a-918aacdedc97"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:31:13 crc kubenswrapper[4708]: I0203 07:31:13.202632 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e55db381-d6ba-4d52-981a-918aacdedc97-config-data" (OuterVolumeSpecName: "config-data") pod "e55db381-d6ba-4d52-981a-918aacdedc97" (UID: "e55db381-d6ba-4d52-981a-918aacdedc97"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:31:13 crc kubenswrapper[4708]: I0203 07:31:13.276133 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q5wht\" (UniqueName: \"kubernetes.io/projected/e55db381-d6ba-4d52-981a-918aacdedc97-kube-api-access-q5wht\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:13 crc kubenswrapper[4708]: I0203 07:31:13.276172 4708 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e55db381-d6ba-4d52-981a-918aacdedc97-scripts\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:13 crc kubenswrapper[4708]: I0203 07:31:13.276185 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e55db381-d6ba-4d52-981a-918aacdedc97-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:13 crc kubenswrapper[4708]: I0203 07:31:13.276199 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e55db381-d6ba-4d52-981a-918aacdedc97-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:13 crc kubenswrapper[4708]: I0203 07:31:13.677109 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-57zpw" Feb 03 07:31:13 crc kubenswrapper[4708]: I0203 07:31:13.677105 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-57zpw" event={"ID":"e55db381-d6ba-4d52-981a-918aacdedc97","Type":"ContainerDied","Data":"77eeb36c4a8ad5812ed5fd7ee7b648bba3fb197b773b773dbc23756ff35483c9"} Feb 03 07:31:13 crc kubenswrapper[4708]: I0203 07:31:13.677786 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="77eeb36c4a8ad5812ed5fd7ee7b648bba3fb197b773b773dbc23756ff35483c9" Feb 03 07:31:13 crc kubenswrapper[4708]: I0203 07:31:13.844374 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 03 07:31:13 crc kubenswrapper[4708]: I0203 07:31:13.879065 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 07:31:13 crc kubenswrapper[4708]: I0203 07:31:13.879295 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="08e414ef-4cf0-42c0-931b-299c7ff1a7a9" containerName="nova-scheduler-scheduler" containerID="cri-o://a8fa666489f0dc74553b7529cf755f837b9653ce8a11c8aa7b423c305e1aae34" gracePeriod=30 Feb 03 07:31:13 crc kubenswrapper[4708]: I0203 07:31:13.893703 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 03 07:31:13 crc kubenswrapper[4708]: I0203 07:31:13.893933 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="3ccf3d30-d7b0-48cf-946f-4ba5154fefdf" containerName="nova-metadata-log" containerID="cri-o://497efe5a8956ca537466620d96a5228c8c12f319ac4616d1cce2cca596452583" gracePeriod=30 Feb 03 07:31:13 crc kubenswrapper[4708]: I0203 07:31:13.894348 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="3ccf3d30-d7b0-48cf-946f-4ba5154fefdf" containerName="nova-metadata-metadata" containerID="cri-o://b637062e287b3a9d9da7a42f869927094238cfc3758f6ce4455b1d43810a7178" gracePeriod=30 Feb 03 07:31:13 crc kubenswrapper[4708]: I0203 07:31:13.949031 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6ce4b430-d4a5-4a52-936a-47bce054b824" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.209:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 03 07:31:13 crc kubenswrapper[4708]: I0203 07:31:13.949373 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6ce4b430-d4a5-4a52-936a-47bce054b824" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.209:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 03 07:31:14 crc kubenswrapper[4708]: I0203 07:31:14.687003 4708 generic.go:334] "Generic (PLEG): container finished" podID="3ccf3d30-d7b0-48cf-946f-4ba5154fefdf" containerID="497efe5a8956ca537466620d96a5228c8c12f319ac4616d1cce2cca596452583" exitCode=143 Feb 03 07:31:14 crc kubenswrapper[4708]: I0203 07:31:14.687177 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6ce4b430-d4a5-4a52-936a-47bce054b824" containerName="nova-api-log" containerID="cri-o://07b78cb51b2c6032ad1770564fe335b7cf8cd106851967c4ee56877604b40167" gracePeriod=30 Feb 03 07:31:14 crc kubenswrapper[4708]: I0203 07:31:14.687421 4708 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/nova-metadata-0" event={"ID":"3ccf3d30-d7b0-48cf-946f-4ba5154fefdf","Type":"ContainerDied","Data":"497efe5a8956ca537466620d96a5228c8c12f319ac4616d1cce2cca596452583"} Feb 03 07:31:14 crc kubenswrapper[4708]: I0203 07:31:14.687738 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6ce4b430-d4a5-4a52-936a-47bce054b824" containerName="nova-api-api" containerID="cri-o://c06770ab48c3f6fd8d8edb6267daffdf921570bdb6e95c90e0efb01249f333cb" gracePeriod=30 Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.602028 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.622081 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08e414ef-4cf0-42c0-931b-299c7ff1a7a9-config-data\") pod \"08e414ef-4cf0-42c0-931b-299c7ff1a7a9\" (UID: \"08e414ef-4cf0-42c0-931b-299c7ff1a7a9\") " Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.622298 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d2d2l\" (UniqueName: \"kubernetes.io/projected/08e414ef-4cf0-42c0-931b-299c7ff1a7a9-kube-api-access-d2d2l\") pod \"08e414ef-4cf0-42c0-931b-299c7ff1a7a9\" (UID: \"08e414ef-4cf0-42c0-931b-299c7ff1a7a9\") " Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.622358 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08e414ef-4cf0-42c0-931b-299c7ff1a7a9-combined-ca-bundle\") pod \"08e414ef-4cf0-42c0-931b-299c7ff1a7a9\" (UID: \"08e414ef-4cf0-42c0-931b-299c7ff1a7a9\") " Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.628995 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08e414ef-4cf0-42c0-931b-299c7ff1a7a9-kube-api-access-d2d2l" (OuterVolumeSpecName: "kube-api-access-d2d2l") pod "08e414ef-4cf0-42c0-931b-299c7ff1a7a9" (UID: "08e414ef-4cf0-42c0-931b-299c7ff1a7a9"). InnerVolumeSpecName "kube-api-access-d2d2l". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.663049 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08e414ef-4cf0-42c0-931b-299c7ff1a7a9-config-data" (OuterVolumeSpecName: "config-data") pod "08e414ef-4cf0-42c0-931b-299c7ff1a7a9" (UID: "08e414ef-4cf0-42c0-931b-299c7ff1a7a9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.670125 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08e414ef-4cf0-42c0-931b-299c7ff1a7a9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "08e414ef-4cf0-42c0-931b-299c7ff1a7a9" (UID: "08e414ef-4cf0-42c0-931b-299c7ff1a7a9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.697078 4708 generic.go:334] "Generic (PLEG): container finished" podID="08e414ef-4cf0-42c0-931b-299c7ff1a7a9" containerID="a8fa666489f0dc74553b7529cf755f837b9653ce8a11c8aa7b423c305e1aae34" exitCode=0 Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.697166 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.697170 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"08e414ef-4cf0-42c0-931b-299c7ff1a7a9","Type":"ContainerDied","Data":"a8fa666489f0dc74553b7529cf755f837b9653ce8a11c8aa7b423c305e1aae34"} Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.697557 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"08e414ef-4cf0-42c0-931b-299c7ff1a7a9","Type":"ContainerDied","Data":"0f49351f84fc55aff2539525f0be1e87f088eb407819b58effeda4f179b6bf2b"} Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.697593 4708 scope.go:117] "RemoveContainer" containerID="a8fa666489f0dc74553b7529cf755f837b9653ce8a11c8aa7b423c305e1aae34" Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.699639 4708 generic.go:334] "Generic (PLEG): container finished" podID="6ce4b430-d4a5-4a52-936a-47bce054b824" containerID="07b78cb51b2c6032ad1770564fe335b7cf8cd106851967c4ee56877604b40167" exitCode=143 Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.699676 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6ce4b430-d4a5-4a52-936a-47bce054b824","Type":"ContainerDied","Data":"07b78cb51b2c6032ad1770564fe335b7cf8cd106851967c4ee56877604b40167"} Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.730589 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d2d2l\" (UniqueName: \"kubernetes.io/projected/08e414ef-4cf0-42c0-931b-299c7ff1a7a9-kube-api-access-d2d2l\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.730620 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08e414ef-4cf0-42c0-931b-299c7ff1a7a9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.730631 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08e414ef-4cf0-42c0-931b-299c7ff1a7a9-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.739083 4708 scope.go:117] "RemoveContainer" containerID="a8fa666489f0dc74553b7529cf755f837b9653ce8a11c8aa7b423c305e1aae34" Feb 03 07:31:15 crc kubenswrapper[4708]: E0203 07:31:15.741273 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8fa666489f0dc74553b7529cf755f837b9653ce8a11c8aa7b423c305e1aae34\": container with ID starting with a8fa666489f0dc74553b7529cf755f837b9653ce8a11c8aa7b423c305e1aae34 not found: ID does not exist" containerID="a8fa666489f0dc74553b7529cf755f837b9653ce8a11c8aa7b423c305e1aae34" Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.741331 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8fa666489f0dc74553b7529cf755f837b9653ce8a11c8aa7b423c305e1aae34"} err="failed to get container status \"a8fa666489f0dc74553b7529cf755f837b9653ce8a11c8aa7b423c305e1aae34\": rpc error: code = NotFound desc = could not find container \"a8fa666489f0dc74553b7529cf755f837b9653ce8a11c8aa7b423c305e1aae34\": container with ID starting with a8fa666489f0dc74553b7529cf755f837b9653ce8a11c8aa7b423c305e1aae34 not found: ID does not exist" Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.777000 4708 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/nova-scheduler-0"] Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.790771 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.799448 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 07:31:15 crc kubenswrapper[4708]: E0203 07:31:15.799905 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7bbff83-97e5-41e1-8445-0c7deba26930" containerName="dnsmasq-dns" Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.799924 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7bbff83-97e5-41e1-8445-0c7deba26930" containerName="dnsmasq-dns" Feb 03 07:31:15 crc kubenswrapper[4708]: E0203 07:31:15.799940 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e55db381-d6ba-4d52-981a-918aacdedc97" containerName="nova-manage" Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.799947 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="e55db381-d6ba-4d52-981a-918aacdedc97" containerName="nova-manage" Feb 03 07:31:15 crc kubenswrapper[4708]: E0203 07:31:15.799982 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7bbff83-97e5-41e1-8445-0c7deba26930" containerName="init" Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.799988 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7bbff83-97e5-41e1-8445-0c7deba26930" containerName="init" Feb 03 07:31:15 crc kubenswrapper[4708]: E0203 07:31:15.799997 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08e414ef-4cf0-42c0-931b-299c7ff1a7a9" containerName="nova-scheduler-scheduler" Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.800004 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="08e414ef-4cf0-42c0-931b-299c7ff1a7a9" containerName="nova-scheduler-scheduler" Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.800178 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="e55db381-d6ba-4d52-981a-918aacdedc97" containerName="nova-manage" Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.800189 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7bbff83-97e5-41e1-8445-0c7deba26930" containerName="dnsmasq-dns" Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.800203 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="08e414ef-4cf0-42c0-931b-299c7ff1a7a9" containerName="nova-scheduler-scheduler" Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.800851 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.803016 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.807272 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.832313 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f68c9ee-6d86-4dad-b9cb-1a22c7afd031-config-data\") pod \"nova-scheduler-0\" (UID: \"5f68c9ee-6d86-4dad-b9cb-1a22c7afd031\") " pod="openstack/nova-scheduler-0" Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.832363 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f68c9ee-6d86-4dad-b9cb-1a22c7afd031-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5f68c9ee-6d86-4dad-b9cb-1a22c7afd031\") " pod="openstack/nova-scheduler-0" Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.832480 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7m8q\" (UniqueName: \"kubernetes.io/projected/5f68c9ee-6d86-4dad-b9cb-1a22c7afd031-kube-api-access-b7m8q\") pod \"nova-scheduler-0\" (UID: \"5f68c9ee-6d86-4dad-b9cb-1a22c7afd031\") " pod="openstack/nova-scheduler-0" Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.934078 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7m8q\" (UniqueName: \"kubernetes.io/projected/5f68c9ee-6d86-4dad-b9cb-1a22c7afd031-kube-api-access-b7m8q\") pod \"nova-scheduler-0\" (UID: \"5f68c9ee-6d86-4dad-b9cb-1a22c7afd031\") " pod="openstack/nova-scheduler-0" Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.934146 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f68c9ee-6d86-4dad-b9cb-1a22c7afd031-config-data\") pod \"nova-scheduler-0\" (UID: \"5f68c9ee-6d86-4dad-b9cb-1a22c7afd031\") " pod="openstack/nova-scheduler-0" Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.934172 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f68c9ee-6d86-4dad-b9cb-1a22c7afd031-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5f68c9ee-6d86-4dad-b9cb-1a22c7afd031\") " pod="openstack/nova-scheduler-0" Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.938649 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f68c9ee-6d86-4dad-b9cb-1a22c7afd031-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5f68c9ee-6d86-4dad-b9cb-1a22c7afd031\") " pod="openstack/nova-scheduler-0" Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.939151 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f68c9ee-6d86-4dad-b9cb-1a22c7afd031-config-data\") pod \"nova-scheduler-0\" (UID: \"5f68c9ee-6d86-4dad-b9cb-1a22c7afd031\") " pod="openstack/nova-scheduler-0" Feb 03 07:31:15 crc kubenswrapper[4708]: I0203 07:31:15.952076 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7m8q\" (UniqueName: 
\"kubernetes.io/projected/5f68c9ee-6d86-4dad-b9cb-1a22c7afd031-kube-api-access-b7m8q\") pod \"nova-scheduler-0\" (UID: \"5f68c9ee-6d86-4dad-b9cb-1a22c7afd031\") " pod="openstack/nova-scheduler-0" Feb 03 07:31:16 crc kubenswrapper[4708]: I0203 07:31:16.106127 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08e414ef-4cf0-42c0-931b-299c7ff1a7a9" path="/var/lib/kubelet/pods/08e414ef-4cf0-42c0-931b-299c7ff1a7a9/volumes" Feb 03 07:31:16 crc kubenswrapper[4708]: I0203 07:31:16.129352 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 03 07:31:16 crc kubenswrapper[4708]: I0203 07:31:16.606215 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 03 07:31:16 crc kubenswrapper[4708]: W0203 07:31:16.607783 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f68c9ee_6d86_4dad_b9cb_1a22c7afd031.slice/crio-7a579ed2b85727c1b8cd20b19bfb0776ce8f00aeb550cb6dfa99d1b55cf2be80 WatchSource:0}: Error finding container 7a579ed2b85727c1b8cd20b19bfb0776ce8f00aeb550cb6dfa99d1b55cf2be80: Status 404 returned error can't find the container with id 7a579ed2b85727c1b8cd20b19bfb0776ce8f00aeb550cb6dfa99d1b55cf2be80 Feb 03 07:31:16 crc kubenswrapper[4708]: I0203 07:31:16.719391 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5f68c9ee-6d86-4dad-b9cb-1a22c7afd031","Type":"ContainerStarted","Data":"7a579ed2b85727c1b8cd20b19bfb0776ce8f00aeb550cb6dfa99d1b55cf2be80"} Feb 03 07:31:17 crc kubenswrapper[4708]: I0203 07:31:17.739582 4708 generic.go:334] "Generic (PLEG): container finished" podID="3ccf3d30-d7b0-48cf-946f-4ba5154fefdf" containerID="b637062e287b3a9d9da7a42f869927094238cfc3758f6ce4455b1d43810a7178" exitCode=0 Feb 03 07:31:17 crc kubenswrapper[4708]: I0203 07:31:17.739680 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3ccf3d30-d7b0-48cf-946f-4ba5154fefdf","Type":"ContainerDied","Data":"b637062e287b3a9d9da7a42f869927094238cfc3758f6ce4455b1d43810a7178"} Feb 03 07:31:17 crc kubenswrapper[4708]: I0203 07:31:17.742048 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5f68c9ee-6d86-4dad-b9cb-1a22c7afd031","Type":"ContainerStarted","Data":"28d8e3c6949ecca37c43b2974384d0dab7ad6a99037cd56d8cbfb34284a786f3"} Feb 03 07:31:17 crc kubenswrapper[4708]: I0203 07:31:17.768480 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.768463856 podStartE2EDuration="2.768463856s" podCreationTimestamp="2026-02-03 07:31:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:31:17.760286135 +0000 UTC m=+1256.742232952" watchObservedRunningTime="2026-02-03 07:31:17.768463856 +0000 UTC m=+1256.750410663" Feb 03 07:31:17 crc kubenswrapper[4708]: I0203 07:31:17.929109 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.071503 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ccf3d30-d7b0-48cf-946f-4ba5154fefdf-logs\") pod \"3ccf3d30-d7b0-48cf-946f-4ba5154fefdf\" (UID: \"3ccf3d30-d7b0-48cf-946f-4ba5154fefdf\") " Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.071873 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ccf3d30-d7b0-48cf-946f-4ba5154fefdf-combined-ca-bundle\") pod \"3ccf3d30-d7b0-48cf-946f-4ba5154fefdf\" (UID: \"3ccf3d30-d7b0-48cf-946f-4ba5154fefdf\") " Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.071953 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-62wrc\" (UniqueName: \"kubernetes.io/projected/3ccf3d30-d7b0-48cf-946f-4ba5154fefdf-kube-api-access-62wrc\") pod \"3ccf3d30-d7b0-48cf-946f-4ba5154fefdf\" (UID: \"3ccf3d30-d7b0-48cf-946f-4ba5154fefdf\") " Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.071968 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ccf3d30-d7b0-48cf-946f-4ba5154fefdf-nova-metadata-tls-certs\") pod \"3ccf3d30-d7b0-48cf-946f-4ba5154fefdf\" (UID: \"3ccf3d30-d7b0-48cf-946f-4ba5154fefdf\") " Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.071988 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ccf3d30-d7b0-48cf-946f-4ba5154fefdf-logs" (OuterVolumeSpecName: "logs") pod "3ccf3d30-d7b0-48cf-946f-4ba5154fefdf" (UID: "3ccf3d30-d7b0-48cf-946f-4ba5154fefdf"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.072041 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ccf3d30-d7b0-48cf-946f-4ba5154fefdf-config-data\") pod \"3ccf3d30-d7b0-48cf-946f-4ba5154fefdf\" (UID: \"3ccf3d30-d7b0-48cf-946f-4ba5154fefdf\") " Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.072566 4708 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ccf3d30-d7b0-48cf-946f-4ba5154fefdf-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.077039 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ccf3d30-d7b0-48cf-946f-4ba5154fefdf-kube-api-access-62wrc" (OuterVolumeSpecName: "kube-api-access-62wrc") pod "3ccf3d30-d7b0-48cf-946f-4ba5154fefdf" (UID: "3ccf3d30-d7b0-48cf-946f-4ba5154fefdf"). InnerVolumeSpecName "kube-api-access-62wrc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.106068 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ccf3d30-d7b0-48cf-946f-4ba5154fefdf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3ccf3d30-d7b0-48cf-946f-4ba5154fefdf" (UID: "3ccf3d30-d7b0-48cf-946f-4ba5154fefdf"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.127060 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ccf3d30-d7b0-48cf-946f-4ba5154fefdf-config-data" (OuterVolumeSpecName: "config-data") pod "3ccf3d30-d7b0-48cf-946f-4ba5154fefdf" (UID: "3ccf3d30-d7b0-48cf-946f-4ba5154fefdf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.130243 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ccf3d30-d7b0-48cf-946f-4ba5154fefdf-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "3ccf3d30-d7b0-48cf-946f-4ba5154fefdf" (UID: "3ccf3d30-d7b0-48cf-946f-4ba5154fefdf"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.173944 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ccf3d30-d7b0-48cf-946f-4ba5154fefdf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.173975 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-62wrc\" (UniqueName: \"kubernetes.io/projected/3ccf3d30-d7b0-48cf-946f-4ba5154fefdf-kube-api-access-62wrc\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.173987 4708 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ccf3d30-d7b0-48cf-946f-4ba5154fefdf-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.173995 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ccf3d30-d7b0-48cf-946f-4ba5154fefdf-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.755194 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3ccf3d30-d7b0-48cf-946f-4ba5154fefdf","Type":"ContainerDied","Data":"2bb59a9576007bcab77a792371a53b83bd277db8a3f8080b8a6c4213502427e0"} Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.755240 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.755781 4708 scope.go:117] "RemoveContainer" containerID="b637062e287b3a9d9da7a42f869927094238cfc3758f6ce4455b1d43810a7178" Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.789766 4708 scope.go:117] "RemoveContainer" containerID="497efe5a8956ca537466620d96a5228c8c12f319ac4616d1cce2cca596452583" Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.803129 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.813730 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.830962 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 03 07:31:18 crc kubenswrapper[4708]: E0203 07:31:18.831340 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ccf3d30-d7b0-48cf-946f-4ba5154fefdf" containerName="nova-metadata-metadata" Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.831356 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ccf3d30-d7b0-48cf-946f-4ba5154fefdf" containerName="nova-metadata-metadata" Feb 03 07:31:18 crc kubenswrapper[4708]: E0203 07:31:18.831381 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ccf3d30-d7b0-48cf-946f-4ba5154fefdf" containerName="nova-metadata-log" Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.831389 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ccf3d30-d7b0-48cf-946f-4ba5154fefdf" containerName="nova-metadata-log" Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.831558 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ccf3d30-d7b0-48cf-946f-4ba5154fefdf" containerName="nova-metadata-log" Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.831575 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ccf3d30-d7b0-48cf-946f-4ba5154fefdf" containerName="nova-metadata-metadata" Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.833078 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.835753 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.836653 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.849420 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.989980 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/999ecc72-71e3-4f11-910a-27bd07aa4a05-config-data\") pod \"nova-metadata-0\" (UID: \"999ecc72-71e3-4f11-910a-27bd07aa4a05\") " pod="openstack/nova-metadata-0" Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.990067 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzw2c\" (UniqueName: \"kubernetes.io/projected/999ecc72-71e3-4f11-910a-27bd07aa4a05-kube-api-access-rzw2c\") pod \"nova-metadata-0\" (UID: \"999ecc72-71e3-4f11-910a-27bd07aa4a05\") " pod="openstack/nova-metadata-0" Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.990171 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/999ecc72-71e3-4f11-910a-27bd07aa4a05-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"999ecc72-71e3-4f11-910a-27bd07aa4a05\") " pod="openstack/nova-metadata-0" Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.990226 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/999ecc72-71e3-4f11-910a-27bd07aa4a05-logs\") pod \"nova-metadata-0\" (UID: \"999ecc72-71e3-4f11-910a-27bd07aa4a05\") " pod="openstack/nova-metadata-0" Feb 03 07:31:18 crc kubenswrapper[4708]: I0203 07:31:18.990294 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/999ecc72-71e3-4f11-910a-27bd07aa4a05-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"999ecc72-71e3-4f11-910a-27bd07aa4a05\") " pod="openstack/nova-metadata-0" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.091690 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzw2c\" (UniqueName: \"kubernetes.io/projected/999ecc72-71e3-4f11-910a-27bd07aa4a05-kube-api-access-rzw2c\") pod \"nova-metadata-0\" (UID: \"999ecc72-71e3-4f11-910a-27bd07aa4a05\") " pod="openstack/nova-metadata-0" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.091899 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/999ecc72-71e3-4f11-910a-27bd07aa4a05-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"999ecc72-71e3-4f11-910a-27bd07aa4a05\") " pod="openstack/nova-metadata-0" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.091941 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/999ecc72-71e3-4f11-910a-27bd07aa4a05-logs\") pod \"nova-metadata-0\" (UID: \"999ecc72-71e3-4f11-910a-27bd07aa4a05\") " 
pod="openstack/nova-metadata-0" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.091989 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/999ecc72-71e3-4f11-910a-27bd07aa4a05-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"999ecc72-71e3-4f11-910a-27bd07aa4a05\") " pod="openstack/nova-metadata-0" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.092317 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/999ecc72-71e3-4f11-910a-27bd07aa4a05-logs\") pod \"nova-metadata-0\" (UID: \"999ecc72-71e3-4f11-910a-27bd07aa4a05\") " pod="openstack/nova-metadata-0" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.092481 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/999ecc72-71e3-4f11-910a-27bd07aa4a05-config-data\") pod \"nova-metadata-0\" (UID: \"999ecc72-71e3-4f11-910a-27bd07aa4a05\") " pod="openstack/nova-metadata-0" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.102021 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/999ecc72-71e3-4f11-910a-27bd07aa4a05-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"999ecc72-71e3-4f11-910a-27bd07aa4a05\") " pod="openstack/nova-metadata-0" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.102467 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/999ecc72-71e3-4f11-910a-27bd07aa4a05-config-data\") pod \"nova-metadata-0\" (UID: \"999ecc72-71e3-4f11-910a-27bd07aa4a05\") " pod="openstack/nova-metadata-0" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.108592 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/999ecc72-71e3-4f11-910a-27bd07aa4a05-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"999ecc72-71e3-4f11-910a-27bd07aa4a05\") " pod="openstack/nova-metadata-0" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.116080 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzw2c\" (UniqueName: \"kubernetes.io/projected/999ecc72-71e3-4f11-910a-27bd07aa4a05-kube-api-access-rzw2c\") pod \"nova-metadata-0\" (UID: \"999ecc72-71e3-4f11-910a-27bd07aa4a05\") " pod="openstack/nova-metadata-0" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.156534 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.526379 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.606846 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ce4b430-d4a5-4a52-936a-47bce054b824-config-data\") pod \"6ce4b430-d4a5-4a52-936a-47bce054b824\" (UID: \"6ce4b430-d4a5-4a52-936a-47bce054b824\") " Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.607013 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ce4b430-d4a5-4a52-936a-47bce054b824-internal-tls-certs\") pod \"6ce4b430-d4a5-4a52-936a-47bce054b824\" (UID: \"6ce4b430-d4a5-4a52-936a-47bce054b824\") " Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.607088 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6ce4b430-d4a5-4a52-936a-47bce054b824-logs\") pod \"6ce4b430-d4a5-4a52-936a-47bce054b824\" (UID: \"6ce4b430-d4a5-4a52-936a-47bce054b824\") " Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.607150 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rwv46\" (UniqueName: \"kubernetes.io/projected/6ce4b430-d4a5-4a52-936a-47bce054b824-kube-api-access-rwv46\") pod \"6ce4b430-d4a5-4a52-936a-47bce054b824\" (UID: \"6ce4b430-d4a5-4a52-936a-47bce054b824\") " Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.607180 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ce4b430-d4a5-4a52-936a-47bce054b824-public-tls-certs\") pod \"6ce4b430-d4a5-4a52-936a-47bce054b824\" (UID: \"6ce4b430-d4a5-4a52-936a-47bce054b824\") " Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.607248 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ce4b430-d4a5-4a52-936a-47bce054b824-combined-ca-bundle\") pod \"6ce4b430-d4a5-4a52-936a-47bce054b824\" (UID: \"6ce4b430-d4a5-4a52-936a-47bce054b824\") " Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.607754 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ce4b430-d4a5-4a52-936a-47bce054b824-logs" (OuterVolumeSpecName: "logs") pod "6ce4b430-d4a5-4a52-936a-47bce054b824" (UID: "6ce4b430-d4a5-4a52-936a-47bce054b824"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.614004 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ce4b430-d4a5-4a52-936a-47bce054b824-kube-api-access-rwv46" (OuterVolumeSpecName: "kube-api-access-rwv46") pod "6ce4b430-d4a5-4a52-936a-47bce054b824" (UID: "6ce4b430-d4a5-4a52-936a-47bce054b824"). InnerVolumeSpecName "kube-api-access-rwv46". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.635256 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ce4b430-d4a5-4a52-936a-47bce054b824-config-data" (OuterVolumeSpecName: "config-data") pod "6ce4b430-d4a5-4a52-936a-47bce054b824" (UID: "6ce4b430-d4a5-4a52-936a-47bce054b824"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.656499 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 03 07:31:19 crc kubenswrapper[4708]: W0203 07:31:19.657319 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod999ecc72_71e3_4f11_910a_27bd07aa4a05.slice/crio-ddfeaefacd140d3a74c1d5dd7e655c094639a8c7fe8833d2b58118e5f0636f06 WatchSource:0}: Error finding container ddfeaefacd140d3a74c1d5dd7e655c094639a8c7fe8833d2b58118e5f0636f06: Status 404 returned error can't find the container with id ddfeaefacd140d3a74c1d5dd7e655c094639a8c7fe8833d2b58118e5f0636f06 Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.679619 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ce4b430-d4a5-4a52-936a-47bce054b824-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6ce4b430-d4a5-4a52-936a-47bce054b824" (UID: "6ce4b430-d4a5-4a52-936a-47bce054b824"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.687106 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ce4b430-d4a5-4a52-936a-47bce054b824-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "6ce4b430-d4a5-4a52-936a-47bce054b824" (UID: "6ce4b430-d4a5-4a52-936a-47bce054b824"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.693159 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ce4b430-d4a5-4a52-936a-47bce054b824-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "6ce4b430-d4a5-4a52-936a-47bce054b824" (UID: "6ce4b430-d4a5-4a52-936a-47bce054b824"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.710017 4708 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ce4b430-d4a5-4a52-936a-47bce054b824-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.710060 4708 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6ce4b430-d4a5-4a52-936a-47bce054b824-logs\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.710074 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rwv46\" (UniqueName: \"kubernetes.io/projected/6ce4b430-d4a5-4a52-936a-47bce054b824-kube-api-access-rwv46\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.710091 4708 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ce4b430-d4a5-4a52-936a-47bce054b824-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.710104 4708 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ce4b430-d4a5-4a52-936a-47bce054b824-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.710116 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ce4b430-d4a5-4a52-936a-47bce054b824-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.766984 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"999ecc72-71e3-4f11-910a-27bd07aa4a05","Type":"ContainerStarted","Data":"ddfeaefacd140d3a74c1d5dd7e655c094639a8c7fe8833d2b58118e5f0636f06"} Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.774286 4708 generic.go:334] "Generic (PLEG): container finished" podID="6ce4b430-d4a5-4a52-936a-47bce054b824" containerID="c06770ab48c3f6fd8d8edb6267daffdf921570bdb6e95c90e0efb01249f333cb" exitCode=0 Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.774333 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6ce4b430-d4a5-4a52-936a-47bce054b824","Type":"ContainerDied","Data":"c06770ab48c3f6fd8d8edb6267daffdf921570bdb6e95c90e0efb01249f333cb"} Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.774361 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6ce4b430-d4a5-4a52-936a-47bce054b824","Type":"ContainerDied","Data":"23aa1ff04578b47322b5f9eaa53c18a2d383d118d0d34fca21207c9de4d54ee0"} Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.774377 4708 scope.go:117] "RemoveContainer" containerID="c06770ab48c3f6fd8d8edb6267daffdf921570bdb6e95c90e0efb01249f333cb" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.774374 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.816388 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.820276 4708 scope.go:117] "RemoveContainer" containerID="07b78cb51b2c6032ad1770564fe335b7cf8cd106851967c4ee56877604b40167" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.830104 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.844032 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 03 07:31:19 crc kubenswrapper[4708]: E0203 07:31:19.844558 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ce4b430-d4a5-4a52-936a-47bce054b824" containerName="nova-api-log" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.844579 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ce4b430-d4a5-4a52-936a-47bce054b824" containerName="nova-api-log" Feb 03 07:31:19 crc kubenswrapper[4708]: E0203 07:31:19.844590 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ce4b430-d4a5-4a52-936a-47bce054b824" containerName="nova-api-api" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.844599 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ce4b430-d4a5-4a52-936a-47bce054b824" containerName="nova-api-api" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.844811 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ce4b430-d4a5-4a52-936a-47bce054b824" containerName="nova-api-api" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.844847 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ce4b430-d4a5-4a52-936a-47bce054b824" containerName="nova-api-log" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.845921 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.848917 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.849038 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.849136 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.853170 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.856711 4708 scope.go:117] "RemoveContainer" containerID="c06770ab48c3f6fd8d8edb6267daffdf921570bdb6e95c90e0efb01249f333cb" Feb 03 07:31:19 crc kubenswrapper[4708]: E0203 07:31:19.857220 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c06770ab48c3f6fd8d8edb6267daffdf921570bdb6e95c90e0efb01249f333cb\": container with ID starting with c06770ab48c3f6fd8d8edb6267daffdf921570bdb6e95c90e0efb01249f333cb not found: ID does not exist" containerID="c06770ab48c3f6fd8d8edb6267daffdf921570bdb6e95c90e0efb01249f333cb" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.857270 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c06770ab48c3f6fd8d8edb6267daffdf921570bdb6e95c90e0efb01249f333cb"} err="failed to get container status \"c06770ab48c3f6fd8d8edb6267daffdf921570bdb6e95c90e0efb01249f333cb\": rpc error: code = NotFound desc = could not find container \"c06770ab48c3f6fd8d8edb6267daffdf921570bdb6e95c90e0efb01249f333cb\": container with ID starting with c06770ab48c3f6fd8d8edb6267daffdf921570bdb6e95c90e0efb01249f333cb not found: ID does not exist" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.857299 4708 scope.go:117] "RemoveContainer" containerID="07b78cb51b2c6032ad1770564fe335b7cf8cd106851967c4ee56877604b40167" Feb 03 07:31:19 crc kubenswrapper[4708]: E0203 07:31:19.857742 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07b78cb51b2c6032ad1770564fe335b7cf8cd106851967c4ee56877604b40167\": container with ID starting with 07b78cb51b2c6032ad1770564fe335b7cf8cd106851967c4ee56877604b40167 not found: ID does not exist" containerID="07b78cb51b2c6032ad1770564fe335b7cf8cd106851967c4ee56877604b40167" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.857776 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07b78cb51b2c6032ad1770564fe335b7cf8cd106851967c4ee56877604b40167"} err="failed to get container status \"07b78cb51b2c6032ad1770564fe335b7cf8cd106851967c4ee56877604b40167\": rpc error: code = NotFound desc = could not find container \"07b78cb51b2c6032ad1770564fe335b7cf8cd106851967c4ee56877604b40167\": container with ID starting with 07b78cb51b2c6032ad1770564fe335b7cf8cd106851967c4ee56877604b40167 not found: ID does not exist" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.913458 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0dead86a-ae50-4e2d-b917-c23cf0a6bf6c-public-tls-certs\") pod \"nova-api-0\" (UID: \"0dead86a-ae50-4e2d-b917-c23cf0a6bf6c\") " pod="openstack/nova-api-0" Feb 03 
07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.913508 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0dead86a-ae50-4e2d-b917-c23cf0a6bf6c-logs\") pod \"nova-api-0\" (UID: \"0dead86a-ae50-4e2d-b917-c23cf0a6bf6c\") " pod="openstack/nova-api-0" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.913610 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bbc5\" (UniqueName: \"kubernetes.io/projected/0dead86a-ae50-4e2d-b917-c23cf0a6bf6c-kube-api-access-2bbc5\") pod \"nova-api-0\" (UID: \"0dead86a-ae50-4e2d-b917-c23cf0a6bf6c\") " pod="openstack/nova-api-0" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.913731 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0dead86a-ae50-4e2d-b917-c23cf0a6bf6c-config-data\") pod \"nova-api-0\" (UID: \"0dead86a-ae50-4e2d-b917-c23cf0a6bf6c\") " pod="openstack/nova-api-0" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.913808 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0dead86a-ae50-4e2d-b917-c23cf0a6bf6c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0dead86a-ae50-4e2d-b917-c23cf0a6bf6c\") " pod="openstack/nova-api-0" Feb 03 07:31:19 crc kubenswrapper[4708]: I0203 07:31:19.913909 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0dead86a-ae50-4e2d-b917-c23cf0a6bf6c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"0dead86a-ae50-4e2d-b917-c23cf0a6bf6c\") " pod="openstack/nova-api-0" Feb 03 07:31:20 crc kubenswrapper[4708]: I0203 07:31:20.015526 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0dead86a-ae50-4e2d-b917-c23cf0a6bf6c-config-data\") pod \"nova-api-0\" (UID: \"0dead86a-ae50-4e2d-b917-c23cf0a6bf6c\") " pod="openstack/nova-api-0" Feb 03 07:31:20 crc kubenswrapper[4708]: I0203 07:31:20.015562 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0dead86a-ae50-4e2d-b917-c23cf0a6bf6c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0dead86a-ae50-4e2d-b917-c23cf0a6bf6c\") " pod="openstack/nova-api-0" Feb 03 07:31:20 crc kubenswrapper[4708]: I0203 07:31:20.015610 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0dead86a-ae50-4e2d-b917-c23cf0a6bf6c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"0dead86a-ae50-4e2d-b917-c23cf0a6bf6c\") " pod="openstack/nova-api-0" Feb 03 07:31:20 crc kubenswrapper[4708]: I0203 07:31:20.015631 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0dead86a-ae50-4e2d-b917-c23cf0a6bf6c-public-tls-certs\") pod \"nova-api-0\" (UID: \"0dead86a-ae50-4e2d-b917-c23cf0a6bf6c\") " pod="openstack/nova-api-0" Feb 03 07:31:20 crc kubenswrapper[4708]: I0203 07:31:20.015654 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0dead86a-ae50-4e2d-b917-c23cf0a6bf6c-logs\") pod \"nova-api-0\" (UID: 
\"0dead86a-ae50-4e2d-b917-c23cf0a6bf6c\") " pod="openstack/nova-api-0" Feb 03 07:31:20 crc kubenswrapper[4708]: I0203 07:31:20.015693 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bbc5\" (UniqueName: \"kubernetes.io/projected/0dead86a-ae50-4e2d-b917-c23cf0a6bf6c-kube-api-access-2bbc5\") pod \"nova-api-0\" (UID: \"0dead86a-ae50-4e2d-b917-c23cf0a6bf6c\") " pod="openstack/nova-api-0" Feb 03 07:31:20 crc kubenswrapper[4708]: I0203 07:31:20.017824 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0dead86a-ae50-4e2d-b917-c23cf0a6bf6c-logs\") pod \"nova-api-0\" (UID: \"0dead86a-ae50-4e2d-b917-c23cf0a6bf6c\") " pod="openstack/nova-api-0" Feb 03 07:31:20 crc kubenswrapper[4708]: I0203 07:31:20.018770 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0dead86a-ae50-4e2d-b917-c23cf0a6bf6c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"0dead86a-ae50-4e2d-b917-c23cf0a6bf6c\") " pod="openstack/nova-api-0" Feb 03 07:31:20 crc kubenswrapper[4708]: I0203 07:31:20.019191 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0dead86a-ae50-4e2d-b917-c23cf0a6bf6c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0dead86a-ae50-4e2d-b917-c23cf0a6bf6c\") " pod="openstack/nova-api-0" Feb 03 07:31:20 crc kubenswrapper[4708]: I0203 07:31:20.020917 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0dead86a-ae50-4e2d-b917-c23cf0a6bf6c-public-tls-certs\") pod \"nova-api-0\" (UID: \"0dead86a-ae50-4e2d-b917-c23cf0a6bf6c\") " pod="openstack/nova-api-0" Feb 03 07:31:20 crc kubenswrapper[4708]: I0203 07:31:20.022566 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0dead86a-ae50-4e2d-b917-c23cf0a6bf6c-config-data\") pod \"nova-api-0\" (UID: \"0dead86a-ae50-4e2d-b917-c23cf0a6bf6c\") " pod="openstack/nova-api-0" Feb 03 07:31:20 crc kubenswrapper[4708]: I0203 07:31:20.033930 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bbc5\" (UniqueName: \"kubernetes.io/projected/0dead86a-ae50-4e2d-b917-c23cf0a6bf6c-kube-api-access-2bbc5\") pod \"nova-api-0\" (UID: \"0dead86a-ae50-4e2d-b917-c23cf0a6bf6c\") " pod="openstack/nova-api-0" Feb 03 07:31:20 crc kubenswrapper[4708]: I0203 07:31:20.117192 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ccf3d30-d7b0-48cf-946f-4ba5154fefdf" path="/var/lib/kubelet/pods/3ccf3d30-d7b0-48cf-946f-4ba5154fefdf/volumes" Feb 03 07:31:20 crc kubenswrapper[4708]: I0203 07:31:20.119215 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ce4b430-d4a5-4a52-936a-47bce054b824" path="/var/lib/kubelet/pods/6ce4b430-d4a5-4a52-936a-47bce054b824/volumes" Feb 03 07:31:20 crc kubenswrapper[4708]: I0203 07:31:20.187815 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 03 07:31:20 crc kubenswrapper[4708]: I0203 07:31:20.645128 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 03 07:31:20 crc kubenswrapper[4708]: I0203 07:31:20.789641 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0dead86a-ae50-4e2d-b917-c23cf0a6bf6c","Type":"ContainerStarted","Data":"bba72c87b62f816f8e7494126eab18991667e050c5ce5eb0b50a6955ee51407c"} Feb 03 07:31:20 crc kubenswrapper[4708]: I0203 07:31:20.792439 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"999ecc72-71e3-4f11-910a-27bd07aa4a05","Type":"ContainerStarted","Data":"c7e139e5b6609507d5a5443c2bd777e1a3b2ce23a3b3b3ffa386ef6d394221f5"} Feb 03 07:31:20 crc kubenswrapper[4708]: I0203 07:31:20.792487 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"999ecc72-71e3-4f11-910a-27bd07aa4a05","Type":"ContainerStarted","Data":"36be522c5f75d4709dc85981cad0c2dfc4c55ad3d12564a7d3a085f5f878a82a"} Feb 03 07:31:20 crc kubenswrapper[4708]: I0203 07:31:20.819356 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.8193299659999997 podStartE2EDuration="2.819329966s" podCreationTimestamp="2026-02-03 07:31:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:31:20.809414733 +0000 UTC m=+1259.791361540" watchObservedRunningTime="2026-02-03 07:31:20.819329966 +0000 UTC m=+1259.801276793" Feb 03 07:31:21 crc kubenswrapper[4708]: I0203 07:31:21.130080 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Feb 03 07:31:21 crc kubenswrapper[4708]: I0203 07:31:21.805732 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0dead86a-ae50-4e2d-b917-c23cf0a6bf6c","Type":"ContainerStarted","Data":"1602fc1ebf5c1321695f7fec040c760d3d62d1460a41f8ebc3f74dcd22ee3d61"} Feb 03 07:31:21 crc kubenswrapper[4708]: I0203 07:31:21.805859 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0dead86a-ae50-4e2d-b917-c23cf0a6bf6c","Type":"ContainerStarted","Data":"287f116707abb34ceffdd3e597ff56b6d83efd7dad5cfc686421a1800d2d27f4"} Feb 03 07:31:21 crc kubenswrapper[4708]: I0203 07:31:21.828742 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.828719468 podStartE2EDuration="2.828719468s" podCreationTimestamp="2026-02-03 07:31:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:31:21.827495318 +0000 UTC m=+1260.809442145" watchObservedRunningTime="2026-02-03 07:31:21.828719468 +0000 UTC m=+1260.810666265" Feb 03 07:31:23 crc kubenswrapper[4708]: I0203 07:31:23.833346 4708 patch_prober.go:28] interesting pod/machine-config-daemon-r94bn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:31:23 crc kubenswrapper[4708]: I0203 07:31:23.833584 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" 
podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:31:23 crc kubenswrapper[4708]: I0203 07:31:23.833627 4708 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" Feb 03 07:31:23 crc kubenswrapper[4708]: I0203 07:31:23.834116 4708 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"df2930ca149e0a66df19a750e27479be61f11887a85606435a8612426d90bb50"} pod="openshift-machine-config-operator/machine-config-daemon-r94bn" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 03 07:31:23 crc kubenswrapper[4708]: I0203 07:31:23.834171 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" containerID="cri-o://df2930ca149e0a66df19a750e27479be61f11887a85606435a8612426d90bb50" gracePeriod=600 Feb 03 07:31:24 crc kubenswrapper[4708]: I0203 07:31:24.157425 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 03 07:31:24 crc kubenswrapper[4708]: I0203 07:31:24.157778 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 03 07:31:24 crc kubenswrapper[4708]: I0203 07:31:24.837143 4708 generic.go:334] "Generic (PLEG): container finished" podID="67498414-5132-496e-9638-189f5941ace0" containerID="df2930ca149e0a66df19a750e27479be61f11887a85606435a8612426d90bb50" exitCode=0 Feb 03 07:31:24 crc kubenswrapper[4708]: I0203 07:31:24.837209 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" event={"ID":"67498414-5132-496e-9638-189f5941ace0","Type":"ContainerDied","Data":"df2930ca149e0a66df19a750e27479be61f11887a85606435a8612426d90bb50"} Feb 03 07:31:24 crc kubenswrapper[4708]: I0203 07:31:24.837616 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" event={"ID":"67498414-5132-496e-9638-189f5941ace0","Type":"ContainerStarted","Data":"be6dd4d0258e6d5acc381dbf52f783da9b3b29ef844719ae2478040c809eed5d"} Feb 03 07:31:24 crc kubenswrapper[4708]: I0203 07:31:24.837635 4708 scope.go:117] "RemoveContainer" containerID="1f83852e58fe0e7c3b1e3ce74595be18ac409f305b8edecdccc7efc4c0f59a4b" Feb 03 07:31:26 crc kubenswrapper[4708]: I0203 07:31:26.130034 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Feb 03 07:31:26 crc kubenswrapper[4708]: I0203 07:31:26.158942 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Feb 03 07:31:26 crc kubenswrapper[4708]: I0203 07:31:26.891069 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Feb 03 07:31:29 crc kubenswrapper[4708]: I0203 07:31:29.157268 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 03 07:31:29 crc kubenswrapper[4708]: I0203 07:31:29.157561 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 03 07:31:30 crc 
kubenswrapper[4708]: I0203 07:31:30.176183 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="999ecc72-71e3-4f11-910a-27bd07aa4a05" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.212:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 03 07:31:30 crc kubenswrapper[4708]: I0203 07:31:30.176224 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="999ecc72-71e3-4f11-910a-27bd07aa4a05" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.212:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 03 07:31:30 crc kubenswrapper[4708]: I0203 07:31:30.188621 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 03 07:31:30 crc kubenswrapper[4708]: I0203 07:31:30.188704 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 03 07:31:31 crc kubenswrapper[4708]: I0203 07:31:31.198915 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="0dead86a-ae50-4e2d-b917-c23cf0a6bf6c" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.213:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 03 07:31:31 crc kubenswrapper[4708]: I0203 07:31:31.198929 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="0dead86a-ae50-4e2d-b917-c23cf0a6bf6c" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.213:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 03 07:31:31 crc kubenswrapper[4708]: I0203 07:31:31.918432 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Feb 03 07:31:39 crc kubenswrapper[4708]: I0203 07:31:39.162875 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Feb 03 07:31:39 crc kubenswrapper[4708]: I0203 07:31:39.169583 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Feb 03 07:31:39 crc kubenswrapper[4708]: I0203 07:31:39.177964 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 03 07:31:40 crc kubenswrapper[4708]: I0203 07:31:40.018593 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 03 07:31:40 crc kubenswrapper[4708]: I0203 07:31:40.199341 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Feb 03 07:31:40 crc kubenswrapper[4708]: I0203 07:31:40.200063 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 03 07:31:40 crc kubenswrapper[4708]: I0203 07:31:40.200335 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Feb 03 07:31:40 crc kubenswrapper[4708]: I0203 07:31:40.206183 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 03 07:31:41 crc kubenswrapper[4708]: I0203 07:31:41.021837 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 03 07:31:41 crc kubenswrapper[4708]: I0203 07:31:41.027831 4708 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 03 07:31:48 crc kubenswrapper[4708]: I0203 07:31:48.740503 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 03 07:31:49 crc kubenswrapper[4708]: I0203 07:31:49.760600 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 03 07:31:53 crc kubenswrapper[4708]: I0203 07:31:53.342280 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="0edbabdf-99f1-49b3-83ee-48ad17467638" containerName="rabbitmq" containerID="cri-o://7e59f79907e1d5dd19cc411977e7fe1121123bdbd223e84f0d86533ed18de870" gracePeriod=604796 Feb 03 07:31:53 crc kubenswrapper[4708]: I0203 07:31:53.953046 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="2572b4b8-5df3-4d81-9bd7-8ef427c6d945" containerName="rabbitmq" containerID="cri-o://b5499106701aba8c5e5b96ad9f798143bd1ca1dbb0a5be7b4f68bf4634038c1e" gracePeriod=604796 Feb 03 07:31:56 crc kubenswrapper[4708]: I0203 07:31:56.138387 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="0edbabdf-99f1-49b3-83ee-48ad17467638" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.94:5671: connect: connection refused" Feb 03 07:31:56 crc kubenswrapper[4708]: I0203 07:31:56.502273 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="2572b4b8-5df3-4d81-9bd7-8ef427c6d945" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.95:5671: connect: connection refused" Feb 03 07:31:59 crc kubenswrapper[4708]: I0203 07:31:59.975383 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.000950 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0edbabdf-99f1-49b3-83ee-48ad17467638-rabbitmq-tls\") pod \"0edbabdf-99f1-49b3-83ee-48ad17467638\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.001035 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0edbabdf-99f1-49b3-83ee-48ad17467638-config-data\") pod \"0edbabdf-99f1-49b3-83ee-48ad17467638\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.001121 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0edbabdf-99f1-49b3-83ee-48ad17467638-rabbitmq-confd\") pod \"0edbabdf-99f1-49b3-83ee-48ad17467638\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.001153 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0edbabdf-99f1-49b3-83ee-48ad17467638-server-conf\") pod \"0edbabdf-99f1-49b3-83ee-48ad17467638\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.008956 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0edbabdf-99f1-49b3-83ee-48ad17467638-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "0edbabdf-99f1-49b3-83ee-48ad17467638" (UID: "0edbabdf-99f1-49b3-83ee-48ad17467638"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.096429 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0edbabdf-99f1-49b3-83ee-48ad17467638-config-data" (OuterVolumeSpecName: "config-data") pod "0edbabdf-99f1-49b3-83ee-48ad17467638" (UID: "0edbabdf-99f1-49b3-83ee-48ad17467638"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.103976 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0edbabdf-99f1-49b3-83ee-48ad17467638-pod-info\") pod \"0edbabdf-99f1-49b3-83ee-48ad17467638\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.104033 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0edbabdf-99f1-49b3-83ee-48ad17467638-rabbitmq-erlang-cookie\") pod \"0edbabdf-99f1-49b3-83ee-48ad17467638\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.104072 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0edbabdf-99f1-49b3-83ee-48ad17467638-erlang-cookie-secret\") pod \"0edbabdf-99f1-49b3-83ee-48ad17467638\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.104098 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0edbabdf-99f1-49b3-83ee-48ad17467638-plugins-conf\") pod \"0edbabdf-99f1-49b3-83ee-48ad17467638\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.104181 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvs4r\" (UniqueName: \"kubernetes.io/projected/0edbabdf-99f1-49b3-83ee-48ad17467638-kube-api-access-cvs4r\") pod \"0edbabdf-99f1-49b3-83ee-48ad17467638\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.104290 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0edbabdf-99f1-49b3-83ee-48ad17467638-rabbitmq-plugins\") pod \"0edbabdf-99f1-49b3-83ee-48ad17467638\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.104340 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"0edbabdf-99f1-49b3-83ee-48ad17467638\" (UID: \"0edbabdf-99f1-49b3-83ee-48ad17467638\") " Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.104941 4708 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0edbabdf-99f1-49b3-83ee-48ad17467638-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.104959 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0edbabdf-99f1-49b3-83ee-48ad17467638-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.110206 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0edbabdf-99f1-49b3-83ee-48ad17467638-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "0edbabdf-99f1-49b3-83ee-48ad17467638" (UID: "0edbabdf-99f1-49b3-83ee-48ad17467638"). InnerVolumeSpecName "plugins-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.111911 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0edbabdf-99f1-49b3-83ee-48ad17467638-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "0edbabdf-99f1-49b3-83ee-48ad17467638" (UID: "0edbabdf-99f1-49b3-83ee-48ad17467638"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.117150 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0edbabdf-99f1-49b3-83ee-48ad17467638-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "0edbabdf-99f1-49b3-83ee-48ad17467638" (UID: "0edbabdf-99f1-49b3-83ee-48ad17467638"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.117259 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "persistence") pod "0edbabdf-99f1-49b3-83ee-48ad17467638" (UID: "0edbabdf-99f1-49b3-83ee-48ad17467638"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.117804 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0edbabdf-99f1-49b3-83ee-48ad17467638-kube-api-access-cvs4r" (OuterVolumeSpecName: "kube-api-access-cvs4r") pod "0edbabdf-99f1-49b3-83ee-48ad17467638" (UID: "0edbabdf-99f1-49b3-83ee-48ad17467638"). InnerVolumeSpecName "kube-api-access-cvs4r". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.119686 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0edbabdf-99f1-49b3-83ee-48ad17467638-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "0edbabdf-99f1-49b3-83ee-48ad17467638" (UID: "0edbabdf-99f1-49b3-83ee-48ad17467638"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.119888 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/0edbabdf-99f1-49b3-83ee-48ad17467638-pod-info" (OuterVolumeSpecName: "pod-info") pod "0edbabdf-99f1-49b3-83ee-48ad17467638" (UID: "0edbabdf-99f1-49b3-83ee-48ad17467638"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.135684 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0edbabdf-99f1-49b3-83ee-48ad17467638-server-conf" (OuterVolumeSpecName: "server-conf") pod "0edbabdf-99f1-49b3-83ee-48ad17467638" (UID: "0edbabdf-99f1-49b3-83ee-48ad17467638"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.193544 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0edbabdf-99f1-49b3-83ee-48ad17467638-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "0edbabdf-99f1-49b3-83ee-48ad17467638" (UID: "0edbabdf-99f1-49b3-83ee-48ad17467638"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.206932 4708 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0edbabdf-99f1-49b3-83ee-48ad17467638-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.206961 4708 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0edbabdf-99f1-49b3-83ee-48ad17467638-server-conf\") on node \"crc\" DevicePath \"\"" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.206972 4708 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0edbabdf-99f1-49b3-83ee-48ad17467638-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.207002 4708 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.207014 4708 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0edbabdf-99f1-49b3-83ee-48ad17467638-pod-info\") on node \"crc\" DevicePath \"\"" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.207023 4708 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0edbabdf-99f1-49b3-83ee-48ad17467638-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.207033 4708 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0edbabdf-99f1-49b3-83ee-48ad17467638-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.207043 4708 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0edbabdf-99f1-49b3-83ee-48ad17467638-plugins-conf\") on node \"crc\" DevicePath \"\"" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.207051 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvs4r\" (UniqueName: \"kubernetes.io/projected/0edbabdf-99f1-49b3-83ee-48ad17467638-kube-api-access-cvs4r\") on node \"crc\" DevicePath \"\"" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.233703 4708 generic.go:334] "Generic (PLEG): container finished" podID="0edbabdf-99f1-49b3-83ee-48ad17467638" containerID="7e59f79907e1d5dd19cc411977e7fe1121123bdbd223e84f0d86533ed18de870" exitCode=0 Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.233858 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0edbabdf-99f1-49b3-83ee-48ad17467638","Type":"ContainerDied","Data":"7e59f79907e1d5dd19cc411977e7fe1121123bdbd223e84f0d86533ed18de870"} Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.234207 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0edbabdf-99f1-49b3-83ee-48ad17467638","Type":"ContainerDied","Data":"052056c6b202ab852ba55e65fe4f4584d238854795d5920d3f5201d72f512039"} Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.234229 4708 scope.go:117] "RemoveContainer" containerID="7e59f79907e1d5dd19cc411977e7fe1121123bdbd223e84f0d86533ed18de870" Feb 03 07:32:00 crc kubenswrapper[4708]: 
I0203 07:32:00.233922 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.247984 4708 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.251538 4708 generic.go:334] "Generic (PLEG): container finished" podID="2572b4b8-5df3-4d81-9bd7-8ef427c6d945" containerID="b5499106701aba8c5e5b96ad9f798143bd1ca1dbb0a5be7b4f68bf4634038c1e" exitCode=0 Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.251621 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2572b4b8-5df3-4d81-9bd7-8ef427c6d945","Type":"ContainerDied","Data":"b5499106701aba8c5e5b96ad9f798143bd1ca1dbb0a5be7b4f68bf4634038c1e"} Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.275610 4708 scope.go:117] "RemoveContainer" containerID="b1e2311a7605dd6875da84dc45c7f866ac255e7770361e3a9016bff4ae16a8aa" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.284904 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.326652 4708 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.333941 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.343258 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.343293 4708 scope.go:117] "RemoveContainer" containerID="7e59f79907e1d5dd19cc411977e7fe1121123bdbd223e84f0d86533ed18de870" Feb 03 07:32:00 crc kubenswrapper[4708]: E0203 07:32:00.343683 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0edbabdf-99f1-49b3-83ee-48ad17467638" containerName="setup-container" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.343702 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="0edbabdf-99f1-49b3-83ee-48ad17467638" containerName="setup-container" Feb 03 07:32:00 crc kubenswrapper[4708]: E0203 07:32:00.343725 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0edbabdf-99f1-49b3-83ee-48ad17467638" containerName="rabbitmq" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.343731 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="0edbabdf-99f1-49b3-83ee-48ad17467638" containerName="rabbitmq" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.343950 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="0edbabdf-99f1-49b3-83ee-48ad17467638" containerName="rabbitmq" Feb 03 07:32:00 crc kubenswrapper[4708]: E0203 07:32:00.344179 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e59f79907e1d5dd19cc411977e7fe1121123bdbd223e84f0d86533ed18de870\": container with ID starting with 7e59f79907e1d5dd19cc411977e7fe1121123bdbd223e84f0d86533ed18de870 not found: ID does not exist" containerID="7e59f79907e1d5dd19cc411977e7fe1121123bdbd223e84f0d86533ed18de870" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.344223 4708 pod_container_deletor.go:53] "DeleteContainer returned 
error" containerID={"Type":"cri-o","ID":"7e59f79907e1d5dd19cc411977e7fe1121123bdbd223e84f0d86533ed18de870"} err="failed to get container status \"7e59f79907e1d5dd19cc411977e7fe1121123bdbd223e84f0d86533ed18de870\": rpc error: code = NotFound desc = could not find container \"7e59f79907e1d5dd19cc411977e7fe1121123bdbd223e84f0d86533ed18de870\": container with ID starting with 7e59f79907e1d5dd19cc411977e7fe1121123bdbd223e84f0d86533ed18de870 not found: ID does not exist" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.344249 4708 scope.go:117] "RemoveContainer" containerID="b1e2311a7605dd6875da84dc45c7f866ac255e7770361e3a9016bff4ae16a8aa" Feb 03 07:32:00 crc kubenswrapper[4708]: E0203 07:32:00.344535 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1e2311a7605dd6875da84dc45c7f866ac255e7770361e3a9016bff4ae16a8aa\": container with ID starting with b1e2311a7605dd6875da84dc45c7f866ac255e7770361e3a9016bff4ae16a8aa not found: ID does not exist" containerID="b1e2311a7605dd6875da84dc45c7f866ac255e7770361e3a9016bff4ae16a8aa" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.344566 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1e2311a7605dd6875da84dc45c7f866ac255e7770361e3a9016bff4ae16a8aa"} err="failed to get container status \"b1e2311a7605dd6875da84dc45c7f866ac255e7770361e3a9016bff4ae16a8aa\": rpc error: code = NotFound desc = could not find container \"b1e2311a7605dd6875da84dc45c7f866ac255e7770361e3a9016bff4ae16a8aa\": container with ID starting with b1e2311a7605dd6875da84dc45c7f866ac255e7770361e3a9016bff4ae16a8aa not found: ID does not exist" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.344981 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.347335 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.347504 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.348079 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.348089 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.352691 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.348145 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-7dqg9" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.348207 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.348241 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.500043 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.536207 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/baf187c5-9fe4-4496-8f70-ac916d0bb075-server-conf\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.536305 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/baf187c5-9fe4-4496-8f70-ac916d0bb075-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.536589 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/baf187c5-9fe4-4496-8f70-ac916d0bb075-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.536691 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/baf187c5-9fe4-4496-8f70-ac916d0bb075-config-data\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.536744 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/baf187c5-9fe4-4496-8f70-ac916d0bb075-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.536867 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/baf187c5-9fe4-4496-8f70-ac916d0bb075-pod-info\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.536971 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdpf4\" (UniqueName: \"kubernetes.io/projected/baf187c5-9fe4-4496-8f70-ac916d0bb075-kube-api-access-gdpf4\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.537017 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/baf187c5-9fe4-4496-8f70-ac916d0bb075-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.537037 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/baf187c5-9fe4-4496-8f70-ac916d0bb075-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 
crc kubenswrapper[4708]: I0203 07:32:00.537082 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.537101 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/baf187c5-9fe4-4496-8f70-ac916d0bb075-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.637966 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-rabbitmq-tls\") pod \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.638016 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rs6rv\" (UniqueName: \"kubernetes.io/projected/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-kube-api-access-rs6rv\") pod \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.638169 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.638193 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-rabbitmq-plugins\") pod \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.638212 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-rabbitmq-confd\") pod \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.638245 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-rabbitmq-erlang-cookie\") pod \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.638288 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-pod-info\") pod \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.638317 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-server-conf\") pod \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\" (UID: 
\"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.638333 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-plugins-conf\") pod \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.638356 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-config-data\") pod \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.638388 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-erlang-cookie-secret\") pod \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\" (UID: \"2572b4b8-5df3-4d81-9bd7-8ef427c6d945\") " Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.638629 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/baf187c5-9fe4-4496-8f70-ac916d0bb075-pod-info\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.638695 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdpf4\" (UniqueName: \"kubernetes.io/projected/baf187c5-9fe4-4496-8f70-ac916d0bb075-kube-api-access-gdpf4\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.638724 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/baf187c5-9fe4-4496-8f70-ac916d0bb075-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.638740 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/baf187c5-9fe4-4496-8f70-ac916d0bb075-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.638774 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.638828 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/baf187c5-9fe4-4496-8f70-ac916d0bb075-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.638875 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/baf187c5-9fe4-4496-8f70-ac916d0bb075-server-conf\") pod 
\"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.638920 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/baf187c5-9fe4-4496-8f70-ac916d0bb075-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.638942 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/baf187c5-9fe4-4496-8f70-ac916d0bb075-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.638961 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/baf187c5-9fe4-4496-8f70-ac916d0bb075-config-data\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.638979 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/baf187c5-9fe4-4496-8f70-ac916d0bb075-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.639645 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/baf187c5-9fe4-4496-8f70-ac916d0bb075-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.639842 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/baf187c5-9fe4-4496-8f70-ac916d0bb075-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.642763 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/baf187c5-9fe4-4496-8f70-ac916d0bb075-server-conf\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.642899 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "2572b4b8-5df3-4d81-9bd7-8ef427c6d945" (UID: "2572b4b8-5df3-4d81-9bd7-8ef427c6d945"). InnerVolumeSpecName "rabbitmq-erlang-cookie". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.642934 4708 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.643016 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-kube-api-access-rs6rv" (OuterVolumeSpecName: "kube-api-access-rs6rv") pod "2572b4b8-5df3-4d81-9bd7-8ef427c6d945" (UID: "2572b4b8-5df3-4d81-9bd7-8ef427c6d945"). InnerVolumeSpecName "kube-api-access-rs6rv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.643312 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/baf187c5-9fe4-4496-8f70-ac916d0bb075-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.644672 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "2572b4b8-5df3-4d81-9bd7-8ef427c6d945" (UID: "2572b4b8-5df3-4d81-9bd7-8ef427c6d945"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.645228 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "2572b4b8-5df3-4d81-9bd7-8ef427c6d945" (UID: "2572b4b8-5df3-4d81-9bd7-8ef427c6d945"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.647715 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "2572b4b8-5df3-4d81-9bd7-8ef427c6d945" (UID: "2572b4b8-5df3-4d81-9bd7-8ef427c6d945"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.648247 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "2572b4b8-5df3-4d81-9bd7-8ef427c6d945" (UID: "2572b4b8-5df3-4d81-9bd7-8ef427c6d945"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.649639 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/baf187c5-9fe4-4496-8f70-ac916d0bb075-config-data\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.652953 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/baf187c5-9fe4-4496-8f70-ac916d0bb075-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.657950 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/baf187c5-9fe4-4496-8f70-ac916d0bb075-pod-info\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.664014 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/baf187c5-9fe4-4496-8f70-ac916d0bb075-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.671956 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "persistence") pod "2572b4b8-5df3-4d81-9bd7-8ef427c6d945" (UID: "2572b4b8-5df3-4d81-9bd7-8ef427c6d945"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.673182 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/baf187c5-9fe4-4496-8f70-ac916d0bb075-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.677717 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-pod-info" (OuterVolumeSpecName: "pod-info") pod "2572b4b8-5df3-4d81-9bd7-8ef427c6d945" (UID: "2572b4b8-5df3-4d81-9bd7-8ef427c6d945"). InnerVolumeSpecName "pod-info". 
PluginName "kubernetes.io/downward-api", VolumeGidValue "" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.685729 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdpf4\" (UniqueName: \"kubernetes.io/projected/baf187c5-9fe4-4496-8f70-ac916d0bb075-kube-api-access-gdpf4\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.702680 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"baf187c5-9fe4-4496-8f70-ac916d0bb075\") " pod="openstack/rabbitmq-server-0" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.741094 4708 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-pod-info\") on node \"crc\" DevicePath \"\"" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.741124 4708 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-plugins-conf\") on node \"crc\" DevicePath \"\"" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.741134 4708 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.741143 4708 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.741151 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rs6rv\" (UniqueName: \"kubernetes.io/projected/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-kube-api-access-rs6rv\") on node \"crc\" DevicePath \"\"" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.741175 4708 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.741184 4708 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.741193 4708 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.750172 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-config-data" (OuterVolumeSpecName: "config-data") pod "2572b4b8-5df3-4d81-9bd7-8ef427c6d945" (UID: "2572b4b8-5df3-4d81-9bd7-8ef427c6d945"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.753550 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-server-conf" (OuterVolumeSpecName: "server-conf") pod "2572b4b8-5df3-4d81-9bd7-8ef427c6d945" (UID: "2572b4b8-5df3-4d81-9bd7-8ef427c6d945"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.764169 4708 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.796823 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "2572b4b8-5df3-4d81-9bd7-8ef427c6d945" (UID: "2572b4b8-5df3-4d81-9bd7-8ef427c6d945"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.843220 4708 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.843248 4708 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.843258 4708 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-server-conf\") on node \"crc\" DevicePath \"\"" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.843267 4708 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2572b4b8-5df3-4d81-9bd7-8ef427c6d945-config-data\") on node \"crc\" DevicePath \"\"" Feb 03 07:32:00 crc kubenswrapper[4708]: I0203 07:32:00.978019 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.267844 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2572b4b8-5df3-4d81-9bd7-8ef427c6d945","Type":"ContainerDied","Data":"d7256cde6d9ca3b4018e771c6adfcdb68cb7b560f0574609748d8c7f053af2d8"} Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.267900 4708 scope.go:117] "RemoveContainer" containerID="b5499106701aba8c5e5b96ad9f798143bd1ca1dbb0a5be7b4f68bf4634038c1e" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.268139 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.305965 4708 scope.go:117] "RemoveContainer" containerID="2beb5c3e89e59e9e8aba167e3cad443c61c86c2d6b1a629e4cbf9f24206b0baa" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.310057 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.330005 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.347833 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 03 07:32:01 crc kubenswrapper[4708]: E0203 07:32:01.348360 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2572b4b8-5df3-4d81-9bd7-8ef427c6d945" containerName="rabbitmq" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.348385 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="2572b4b8-5df3-4d81-9bd7-8ef427c6d945" containerName="rabbitmq" Feb 03 07:32:01 crc kubenswrapper[4708]: E0203 07:32:01.348411 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2572b4b8-5df3-4d81-9bd7-8ef427c6d945" containerName="setup-container" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.348423 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="2572b4b8-5df3-4d81-9bd7-8ef427c6d945" containerName="setup-container" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.348646 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="2572b4b8-5df3-4d81-9bd7-8ef427c6d945" containerName="rabbitmq" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.349713 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.357253 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.357569 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.357829 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-jdccx" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.358034 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.358320 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.359012 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.361861 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.367959 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.457277 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a1eb365e-2bf1-450f-90ae-5ca8f2de2de6-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.457539 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a1eb365e-2bf1-450f-90ae-5ca8f2de2de6-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.457579 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a1eb365e-2bf1-450f-90ae-5ca8f2de2de6-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.457616 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zptlc\" (UniqueName: \"kubernetes.io/projected/a1eb365e-2bf1-450f-90ae-5ca8f2de2de6-kube-api-access-zptlc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.457682 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a1eb365e-2bf1-450f-90ae-5ca8f2de2de6-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.457701 4708 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.457754 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a1eb365e-2bf1-450f-90ae-5ca8f2de2de6-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.457805 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a1eb365e-2bf1-450f-90ae-5ca8f2de2de6-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.457844 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a1eb365e-2bf1-450f-90ae-5ca8f2de2de6-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.457865 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a1eb365e-2bf1-450f-90ae-5ca8f2de2de6-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.457888 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a1eb365e-2bf1-450f-90ae-5ca8f2de2de6-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.461162 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.559480 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a1eb365e-2bf1-450f-90ae-5ca8f2de2de6-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.559543 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a1eb365e-2bf1-450f-90ae-5ca8f2de2de6-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.559585 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zptlc\" (UniqueName: \"kubernetes.io/projected/a1eb365e-2bf1-450f-90ae-5ca8f2de2de6-kube-api-access-zptlc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " 
pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.559625 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a1eb365e-2bf1-450f-90ae-5ca8f2de2de6-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.559643 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.559675 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a1eb365e-2bf1-450f-90ae-5ca8f2de2de6-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.559709 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a1eb365e-2bf1-450f-90ae-5ca8f2de2de6-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.559746 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a1eb365e-2bf1-450f-90ae-5ca8f2de2de6-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.559767 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a1eb365e-2bf1-450f-90ae-5ca8f2de2de6-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.559804 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a1eb365e-2bf1-450f-90ae-5ca8f2de2de6-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.559822 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a1eb365e-2bf1-450f-90ae-5ca8f2de2de6-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.561782 4708 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.564334 4708 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a1eb365e-2bf1-450f-90ae-5ca8f2de2de6-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.564627 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a1eb365e-2bf1-450f-90ae-5ca8f2de2de6-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.564939 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a1eb365e-2bf1-450f-90ae-5ca8f2de2de6-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.565014 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a1eb365e-2bf1-450f-90ae-5ca8f2de2de6-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.565138 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a1eb365e-2bf1-450f-90ae-5ca8f2de2de6-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.565267 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a1eb365e-2bf1-450f-90ae-5ca8f2de2de6-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.567419 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a1eb365e-2bf1-450f-90ae-5ca8f2de2de6-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.568028 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a1eb365e-2bf1-450f-90ae-5ca8f2de2de6-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.570001 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a1eb365e-2bf1-450f-90ae-5ca8f2de2de6-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.583217 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zptlc\" (UniqueName: \"kubernetes.io/projected/a1eb365e-2bf1-450f-90ae-5ca8f2de2de6-kube-api-access-zptlc\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.616847 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6\") " pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:01 crc kubenswrapper[4708]: I0203 07:32:01.690713 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:02 crc kubenswrapper[4708]: I0203 07:32:02.107492 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0edbabdf-99f1-49b3-83ee-48ad17467638" path="/var/lib/kubelet/pods/0edbabdf-99f1-49b3-83ee-48ad17467638/volumes" Feb 03 07:32:02 crc kubenswrapper[4708]: I0203 07:32:02.108896 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2572b4b8-5df3-4d81-9bd7-8ef427c6d945" path="/var/lib/kubelet/pods/2572b4b8-5df3-4d81-9bd7-8ef427c6d945/volumes" Feb 03 07:32:02 crc kubenswrapper[4708]: W0203 07:32:02.199068 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda1eb365e_2bf1_450f_90ae_5ca8f2de2de6.slice/crio-111d631a427abbeca7d19652644898a8cc422df5ce906b7c6808fcfc210bbe55 WatchSource:0}: Error finding container 111d631a427abbeca7d19652644898a8cc422df5ce906b7c6808fcfc210bbe55: Status 404 returned error can't find the container with id 111d631a427abbeca7d19652644898a8cc422df5ce906b7c6808fcfc210bbe55 Feb 03 07:32:02 crc kubenswrapper[4708]: I0203 07:32:02.209966 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 03 07:32:02 crc kubenswrapper[4708]: I0203 07:32:02.286253 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"baf187c5-9fe4-4496-8f70-ac916d0bb075","Type":"ContainerStarted","Data":"e8cf56abc3ef44fd68ee203aa71ee9e0a329ec237134ba29794b00dd5f8391da"} Feb 03 07:32:02 crc kubenswrapper[4708]: I0203 07:32:02.287363 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6","Type":"ContainerStarted","Data":"111d631a427abbeca7d19652644898a8cc422df5ce906b7c6808fcfc210bbe55"} Feb 03 07:32:04 crc kubenswrapper[4708]: I0203 07:32:04.306443 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6","Type":"ContainerStarted","Data":"e0ce33f05776781e5841a8c6adabae813c04d9499cd8204361803f1546c2c5a1"} Feb 03 07:32:04 crc kubenswrapper[4708]: I0203 07:32:04.308933 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"baf187c5-9fe4-4496-8f70-ac916d0bb075","Type":"ContainerStarted","Data":"e92fa2a380e8ab02a9d694b453ad09a5100676dde9f6bb420cc41e68195fb031"} Feb 03 07:32:35 crc kubenswrapper[4708]: I0203 07:32:35.622099 4708 generic.go:334] "Generic (PLEG): container finished" podID="baf187c5-9fe4-4496-8f70-ac916d0bb075" containerID="e92fa2a380e8ab02a9d694b453ad09a5100676dde9f6bb420cc41e68195fb031" exitCode=0 Feb 03 07:32:35 crc kubenswrapper[4708]: I0203 07:32:35.622260 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" 
event={"ID":"baf187c5-9fe4-4496-8f70-ac916d0bb075","Type":"ContainerDied","Data":"e92fa2a380e8ab02a9d694b453ad09a5100676dde9f6bb420cc41e68195fb031"} Feb 03 07:32:36 crc kubenswrapper[4708]: I0203 07:32:36.632456 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"baf187c5-9fe4-4496-8f70-ac916d0bb075","Type":"ContainerStarted","Data":"c436ee5a7efc609f312d9780db516f732e82568d48b3a112eb48f26938b4aabf"} Feb 03 07:32:36 crc kubenswrapper[4708]: I0203 07:32:36.632986 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Feb 03 07:32:36 crc kubenswrapper[4708]: I0203 07:32:36.634375 4708 generic.go:334] "Generic (PLEG): container finished" podID="a1eb365e-2bf1-450f-90ae-5ca8f2de2de6" containerID="e0ce33f05776781e5841a8c6adabae813c04d9499cd8204361803f1546c2c5a1" exitCode=0 Feb 03 07:32:36 crc kubenswrapper[4708]: I0203 07:32:36.634448 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6","Type":"ContainerDied","Data":"e0ce33f05776781e5841a8c6adabae813c04d9499cd8204361803f1546c2c5a1"} Feb 03 07:32:36 crc kubenswrapper[4708]: I0203 07:32:36.671961 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.671940989 podStartE2EDuration="36.671940989s" podCreationTimestamp="2026-02-03 07:32:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:32:36.660999031 +0000 UTC m=+1335.642945858" watchObservedRunningTime="2026-02-03 07:32:36.671940989 +0000 UTC m=+1335.653887826" Feb 03 07:32:37 crc kubenswrapper[4708]: I0203 07:32:37.645118 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a1eb365e-2bf1-450f-90ae-5ca8f2de2de6","Type":"ContainerStarted","Data":"4f1e704cbf8b6ee23fa89225761b6e3149c70fb1cfe0e15f650c2709a07ab93b"} Feb 03 07:32:37 crc kubenswrapper[4708]: I0203 07:32:37.645701 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:32:37 crc kubenswrapper[4708]: I0203 07:32:37.671267 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.671248954 podStartE2EDuration="36.671248954s" podCreationTimestamp="2026-02-03 07:32:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:32:37.664304594 +0000 UTC m=+1336.646251401" watchObservedRunningTime="2026-02-03 07:32:37.671248954 +0000 UTC m=+1336.653195751" Feb 03 07:32:50 crc kubenswrapper[4708]: I0203 07:32:50.981015 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Feb 03 07:32:51 crc kubenswrapper[4708]: I0203 07:32:51.694998 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Feb 03 07:33:45 crc kubenswrapper[4708]: I0203 07:33:45.799442 4708 scope.go:117] "RemoveContainer" containerID="7149d37923a18dfa510a1746af23e264541c58d6c9edbae1fa55797eefc62feb" Feb 03 07:33:45 crc kubenswrapper[4708]: I0203 07:33:45.831519 4708 scope.go:117] "RemoveContainer" containerID="3d00c39fed00c0ab018d6f17b738c44a8d9ddec13c79629ada6f288f394739e1" Feb 03 07:33:45 crc kubenswrapper[4708]: I0203 
07:33:45.884289 4708 scope.go:117] "RemoveContainer" containerID="94fb1a6d62db6177569169006262b4aa64eb823c98086daf1735d3a05ebabe81" Feb 03 07:33:45 crc kubenswrapper[4708]: I0203 07:33:45.946403 4708 scope.go:117] "RemoveContainer" containerID="963007256d1b21421042649674668b2a3384dfd620dd3734b27ce160a1bd0d6c" Feb 03 07:33:53 crc kubenswrapper[4708]: I0203 07:33:53.833314 4708 patch_prober.go:28] interesting pod/machine-config-daemon-r94bn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:33:53 crc kubenswrapper[4708]: I0203 07:33:53.833907 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:34:23 crc kubenswrapper[4708]: I0203 07:34:23.832826 4708 patch_prober.go:28] interesting pod/machine-config-daemon-r94bn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:34:23 crc kubenswrapper[4708]: I0203 07:34:23.833422 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:34:46 crc kubenswrapper[4708]: I0203 07:34:46.058477 4708 scope.go:117] "RemoveContainer" containerID="cc9c1d8b39a8e19ddeda841a7167cce7198b34f64efd8c521320ce2762ac0382" Feb 03 07:34:46 crc kubenswrapper[4708]: I0203 07:34:46.087704 4708 scope.go:117] "RemoveContainer" containerID="c6fe7d6c4788d0785776e8ad0c91a07b6a3d25835e512034d57e333cdd2d7fa1" Feb 03 07:34:46 crc kubenswrapper[4708]: I0203 07:34:46.135403 4708 scope.go:117] "RemoveContainer" containerID="7ba1c6e5b06e4d3086cb7edbcc917f45894325a6286b31509c170c2ab9bb888a" Feb 03 07:34:46 crc kubenswrapper[4708]: I0203 07:34:46.154759 4708 scope.go:117] "RemoveContainer" containerID="8418b11a0b52c4d367b2fe89a33fe20096e713a906b9ab11b43f0195cc97d3b5" Feb 03 07:34:53 crc kubenswrapper[4708]: I0203 07:34:53.833148 4708 patch_prober.go:28] interesting pod/machine-config-daemon-r94bn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:34:53 crc kubenswrapper[4708]: I0203 07:34:53.833767 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:34:53 crc kubenswrapper[4708]: I0203 07:34:53.833833 4708 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" Feb 03 07:34:53 crc kubenswrapper[4708]: I0203 07:34:53.834594 4708 
kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"be6dd4d0258e6d5acc381dbf52f783da9b3b29ef844719ae2478040c809eed5d"} pod="openshift-machine-config-operator/machine-config-daemon-r94bn" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 03 07:34:53 crc kubenswrapper[4708]: I0203 07:34:53.834652 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" containerID="cri-o://be6dd4d0258e6d5acc381dbf52f783da9b3b29ef844719ae2478040c809eed5d" gracePeriod=600 Feb 03 07:34:53 crc kubenswrapper[4708]: I0203 07:34:53.984827 4708 generic.go:334] "Generic (PLEG): container finished" podID="67498414-5132-496e-9638-189f5941ace0" containerID="be6dd4d0258e6d5acc381dbf52f783da9b3b29ef844719ae2478040c809eed5d" exitCode=0 Feb 03 07:34:53 crc kubenswrapper[4708]: I0203 07:34:53.985004 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" event={"ID":"67498414-5132-496e-9638-189f5941ace0","Type":"ContainerDied","Data":"be6dd4d0258e6d5acc381dbf52f783da9b3b29ef844719ae2478040c809eed5d"} Feb 03 07:34:53 crc kubenswrapper[4708]: I0203 07:34:53.985249 4708 scope.go:117] "RemoveContainer" containerID="df2930ca149e0a66df19a750e27479be61f11887a85606435a8612426d90bb50" Feb 03 07:34:54 crc kubenswrapper[4708]: I0203 07:34:54.998111 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" event={"ID":"67498414-5132-496e-9638-189f5941ace0","Type":"ContainerStarted","Data":"1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5"} Feb 03 07:36:09 crc kubenswrapper[4708]: I0203 07:36:09.481143 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6h9dx"] Feb 03 07:36:09 crc kubenswrapper[4708]: I0203 07:36:09.485359 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6h9dx" Feb 03 07:36:09 crc kubenswrapper[4708]: I0203 07:36:09.526154 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6h9dx"] Feb 03 07:36:09 crc kubenswrapper[4708]: I0203 07:36:09.575778 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b-catalog-content\") pod \"certified-operators-6h9dx\" (UID: \"6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b\") " pod="openshift-marketplace/certified-operators-6h9dx" Feb 03 07:36:09 crc kubenswrapper[4708]: I0203 07:36:09.575841 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sc7mx\" (UniqueName: \"kubernetes.io/projected/6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b-kube-api-access-sc7mx\") pod \"certified-operators-6h9dx\" (UID: \"6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b\") " pod="openshift-marketplace/certified-operators-6h9dx" Feb 03 07:36:09 crc kubenswrapper[4708]: I0203 07:36:09.575871 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b-utilities\") pod \"certified-operators-6h9dx\" (UID: \"6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b\") " pod="openshift-marketplace/certified-operators-6h9dx" Feb 03 07:36:09 crc kubenswrapper[4708]: I0203 07:36:09.677282 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b-catalog-content\") pod \"certified-operators-6h9dx\" (UID: \"6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b\") " pod="openshift-marketplace/certified-operators-6h9dx" Feb 03 07:36:09 crc kubenswrapper[4708]: I0203 07:36:09.677354 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sc7mx\" (UniqueName: \"kubernetes.io/projected/6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b-kube-api-access-sc7mx\") pod \"certified-operators-6h9dx\" (UID: \"6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b\") " pod="openshift-marketplace/certified-operators-6h9dx" Feb 03 07:36:09 crc kubenswrapper[4708]: I0203 07:36:09.677387 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b-utilities\") pod \"certified-operators-6h9dx\" (UID: \"6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b\") " pod="openshift-marketplace/certified-operators-6h9dx" Feb 03 07:36:09 crc kubenswrapper[4708]: I0203 07:36:09.677841 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b-catalog-content\") pod \"certified-operators-6h9dx\" (UID: \"6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b\") " pod="openshift-marketplace/certified-operators-6h9dx" Feb 03 07:36:09 crc kubenswrapper[4708]: I0203 07:36:09.677878 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b-utilities\") pod \"certified-operators-6h9dx\" (UID: \"6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b\") " pod="openshift-marketplace/certified-operators-6h9dx" Feb 03 07:36:09 crc kubenswrapper[4708]: I0203 07:36:09.703339 4708 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-sc7mx\" (UniqueName: \"kubernetes.io/projected/6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b-kube-api-access-sc7mx\") pod \"certified-operators-6h9dx\" (UID: \"6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b\") " pod="openshift-marketplace/certified-operators-6h9dx" Feb 03 07:36:09 crc kubenswrapper[4708]: I0203 07:36:09.811324 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6h9dx" Feb 03 07:36:10 crc kubenswrapper[4708]: I0203 07:36:10.474534 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6h9dx"] Feb 03 07:36:10 crc kubenswrapper[4708]: I0203 07:36:10.735741 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6h9dx" event={"ID":"6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b","Type":"ContainerStarted","Data":"33c0806ead1046da4993bbab77fc8b54c6c5cb2cc554299222d8d97c436927db"} Feb 03 07:36:11 crc kubenswrapper[4708]: I0203 07:36:11.748087 4708 generic.go:334] "Generic (PLEG): container finished" podID="6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b" containerID="45d454105c83f98885e2a58d3220b6b88f2990f10612c306313cf31ea0b222f0" exitCode=0 Feb 03 07:36:11 crc kubenswrapper[4708]: I0203 07:36:11.748200 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6h9dx" event={"ID":"6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b","Type":"ContainerDied","Data":"45d454105c83f98885e2a58d3220b6b88f2990f10612c306313cf31ea0b222f0"} Feb 03 07:36:11 crc kubenswrapper[4708]: I0203 07:36:11.751062 4708 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 03 07:36:13 crc kubenswrapper[4708]: I0203 07:36:13.868954 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-n9dmh"] Feb 03 07:36:13 crc kubenswrapper[4708]: I0203 07:36:13.871674 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-n9dmh" Feb 03 07:36:13 crc kubenswrapper[4708]: I0203 07:36:13.881458 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-n9dmh"] Feb 03 07:36:13 crc kubenswrapper[4708]: I0203 07:36:13.967666 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2aa296b0-7fc9-43b8-a149-269ff34dfef8-utilities\") pod \"community-operators-n9dmh\" (UID: \"2aa296b0-7fc9-43b8-a149-269ff34dfef8\") " pod="openshift-marketplace/community-operators-n9dmh" Feb 03 07:36:13 crc kubenswrapper[4708]: I0203 07:36:13.968139 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8ddv\" (UniqueName: \"kubernetes.io/projected/2aa296b0-7fc9-43b8-a149-269ff34dfef8-kube-api-access-p8ddv\") pod \"community-operators-n9dmh\" (UID: \"2aa296b0-7fc9-43b8-a149-269ff34dfef8\") " pod="openshift-marketplace/community-operators-n9dmh" Feb 03 07:36:13 crc kubenswrapper[4708]: I0203 07:36:13.968333 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2aa296b0-7fc9-43b8-a149-269ff34dfef8-catalog-content\") pod \"community-operators-n9dmh\" (UID: \"2aa296b0-7fc9-43b8-a149-269ff34dfef8\") " pod="openshift-marketplace/community-operators-n9dmh" Feb 03 07:36:14 crc kubenswrapper[4708]: I0203 07:36:14.080272 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2aa296b0-7fc9-43b8-a149-269ff34dfef8-utilities\") pod \"community-operators-n9dmh\" (UID: \"2aa296b0-7fc9-43b8-a149-269ff34dfef8\") " pod="openshift-marketplace/community-operators-n9dmh" Feb 03 07:36:14 crc kubenswrapper[4708]: I0203 07:36:14.080719 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2aa296b0-7fc9-43b8-a149-269ff34dfef8-utilities\") pod \"community-operators-n9dmh\" (UID: \"2aa296b0-7fc9-43b8-a149-269ff34dfef8\") " pod="openshift-marketplace/community-operators-n9dmh" Feb 03 07:36:14 crc kubenswrapper[4708]: I0203 07:36:14.080778 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8ddv\" (UniqueName: \"kubernetes.io/projected/2aa296b0-7fc9-43b8-a149-269ff34dfef8-kube-api-access-p8ddv\") pod \"community-operators-n9dmh\" (UID: \"2aa296b0-7fc9-43b8-a149-269ff34dfef8\") " pod="openshift-marketplace/community-operators-n9dmh" Feb 03 07:36:14 crc kubenswrapper[4708]: I0203 07:36:14.080864 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2aa296b0-7fc9-43b8-a149-269ff34dfef8-catalog-content\") pod \"community-operators-n9dmh\" (UID: \"2aa296b0-7fc9-43b8-a149-269ff34dfef8\") " pod="openshift-marketplace/community-operators-n9dmh" Feb 03 07:36:14 crc kubenswrapper[4708]: I0203 07:36:14.081467 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2aa296b0-7fc9-43b8-a149-269ff34dfef8-catalog-content\") pod \"community-operators-n9dmh\" (UID: \"2aa296b0-7fc9-43b8-a149-269ff34dfef8\") " pod="openshift-marketplace/community-operators-n9dmh" Feb 03 07:36:14 crc kubenswrapper[4708]: I0203 07:36:14.103482 4708 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-p8ddv\" (UniqueName: \"kubernetes.io/projected/2aa296b0-7fc9-43b8-a149-269ff34dfef8-kube-api-access-p8ddv\") pod \"community-operators-n9dmh\" (UID: \"2aa296b0-7fc9-43b8-a149-269ff34dfef8\") " pod="openshift-marketplace/community-operators-n9dmh" Feb 03 07:36:14 crc kubenswrapper[4708]: I0203 07:36:14.236475 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n9dmh" Feb 03 07:36:14 crc kubenswrapper[4708]: I0203 07:36:14.780680 4708 generic.go:334] "Generic (PLEG): container finished" podID="6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b" containerID="230b37499ab5687b9f4f0b71db911c510ee3db821f86157a98dde7643f297609" exitCode=0 Feb 03 07:36:14 crc kubenswrapper[4708]: I0203 07:36:14.780896 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6h9dx" event={"ID":"6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b","Type":"ContainerDied","Data":"230b37499ab5687b9f4f0b71db911c510ee3db821f86157a98dde7643f297609"} Feb 03 07:36:14 crc kubenswrapper[4708]: I0203 07:36:14.880516 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-n9dmh"] Feb 03 07:36:14 crc kubenswrapper[4708]: W0203 07:36:14.887171 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2aa296b0_7fc9_43b8_a149_269ff34dfef8.slice/crio-8f747766fe5f08046e0742f5885434b8f87502a4ef9590f3430e5b03d5810d3a WatchSource:0}: Error finding container 8f747766fe5f08046e0742f5885434b8f87502a4ef9590f3430e5b03d5810d3a: Status 404 returned error can't find the container with id 8f747766fe5f08046e0742f5885434b8f87502a4ef9590f3430e5b03d5810d3a Feb 03 07:36:15 crc kubenswrapper[4708]: I0203 07:36:15.791988 4708 generic.go:334] "Generic (PLEG): container finished" podID="2aa296b0-7fc9-43b8-a149-269ff34dfef8" containerID="191ab5fa7f8a2a7a4313d50f8a51d9b17b81a084131cc236e82ed1c5381305ab" exitCode=0 Feb 03 07:36:15 crc kubenswrapper[4708]: I0203 07:36:15.792393 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9dmh" event={"ID":"2aa296b0-7fc9-43b8-a149-269ff34dfef8","Type":"ContainerDied","Data":"191ab5fa7f8a2a7a4313d50f8a51d9b17b81a084131cc236e82ed1c5381305ab"} Feb 03 07:36:15 crc kubenswrapper[4708]: I0203 07:36:15.792515 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9dmh" event={"ID":"2aa296b0-7fc9-43b8-a149-269ff34dfef8","Type":"ContainerStarted","Data":"8f747766fe5f08046e0742f5885434b8f87502a4ef9590f3430e5b03d5810d3a"} Feb 03 07:36:17 crc kubenswrapper[4708]: I0203 07:36:17.829903 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9dmh" event={"ID":"2aa296b0-7fc9-43b8-a149-269ff34dfef8","Type":"ContainerStarted","Data":"bdff98f3588b268356fcc8b8492313a7a1def1d45db7f2e2a6ad017f4fb963bb"} Feb 03 07:36:17 crc kubenswrapper[4708]: I0203 07:36:17.833099 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6h9dx" event={"ID":"6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b","Type":"ContainerStarted","Data":"56419b322574c658acf611512aab384eed66e233cae82a0cfd9ef0df056e8d36"} Feb 03 07:36:17 crc kubenswrapper[4708]: I0203 07:36:17.870535 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6h9dx" 
podStartSLOduration=3.309012661 podStartE2EDuration="8.870514278s" podCreationTimestamp="2026-02-03 07:36:09 +0000 UTC" firstStartedPulling="2026-02-03 07:36:11.750459382 +0000 UTC m=+1550.732406189" lastFinishedPulling="2026-02-03 07:36:17.311960999 +0000 UTC m=+1556.293907806" observedRunningTime="2026-02-03 07:36:17.8660793 +0000 UTC m=+1556.848026107" watchObservedRunningTime="2026-02-03 07:36:17.870514278 +0000 UTC m=+1556.852461075" Feb 03 07:36:18 crc kubenswrapper[4708]: I0203 07:36:18.850297 4708 generic.go:334] "Generic (PLEG): container finished" podID="2aa296b0-7fc9-43b8-a149-269ff34dfef8" containerID="bdff98f3588b268356fcc8b8492313a7a1def1d45db7f2e2a6ad017f4fb963bb" exitCode=0 Feb 03 07:36:18 crc kubenswrapper[4708]: I0203 07:36:18.850353 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9dmh" event={"ID":"2aa296b0-7fc9-43b8-a149-269ff34dfef8","Type":"ContainerDied","Data":"bdff98f3588b268356fcc8b8492313a7a1def1d45db7f2e2a6ad017f4fb963bb"} Feb 03 07:36:19 crc kubenswrapper[4708]: I0203 07:36:19.811721 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6h9dx" Feb 03 07:36:19 crc kubenswrapper[4708]: I0203 07:36:19.812143 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6h9dx" Feb 03 07:36:19 crc kubenswrapper[4708]: I0203 07:36:19.858906 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6h9dx" Feb 03 07:36:20 crc kubenswrapper[4708]: I0203 07:36:20.871174 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9dmh" event={"ID":"2aa296b0-7fc9-43b8-a149-269ff34dfef8","Type":"ContainerStarted","Data":"ba87d5159de6cb38c0190c8c30e9abdbf63e6279594783781df524af50bba727"} Feb 03 07:36:20 crc kubenswrapper[4708]: I0203 07:36:20.890654 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-n9dmh" podStartSLOduration=4.204627474 podStartE2EDuration="7.890634175s" podCreationTimestamp="2026-02-03 07:36:13 +0000 UTC" firstStartedPulling="2026-02-03 07:36:16.930503275 +0000 UTC m=+1555.912450082" lastFinishedPulling="2026-02-03 07:36:20.616509976 +0000 UTC m=+1559.598456783" observedRunningTime="2026-02-03 07:36:20.889491856 +0000 UTC m=+1559.871438663" watchObservedRunningTime="2026-02-03 07:36:20.890634175 +0000 UTC m=+1559.872580972" Feb 03 07:36:24 crc kubenswrapper[4708]: I0203 07:36:24.237339 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-n9dmh" Feb 03 07:36:24 crc kubenswrapper[4708]: I0203 07:36:24.238950 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-n9dmh" Feb 03 07:36:24 crc kubenswrapper[4708]: I0203 07:36:24.304656 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-n9dmh" Feb 03 07:36:25 crc kubenswrapper[4708]: I0203 07:36:25.971261 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-n9dmh" Feb 03 07:36:26 crc kubenswrapper[4708]: I0203 07:36:26.038879 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-n9dmh"] Feb 03 07:36:27 crc kubenswrapper[4708]: I0203 07:36:27.620157 4708 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-v8tcm"] Feb 03 07:36:27 crc kubenswrapper[4708]: I0203 07:36:27.622916 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v8tcm" Feb 03 07:36:27 crc kubenswrapper[4708]: I0203 07:36:27.713194 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-v8tcm"] Feb 03 07:36:27 crc kubenswrapper[4708]: I0203 07:36:27.785930 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96687e5d-529d-4edc-b3c4-72f5ed7b83f3-utilities\") pod \"redhat-operators-v8tcm\" (UID: \"96687e5d-529d-4edc-b3c4-72f5ed7b83f3\") " pod="openshift-marketplace/redhat-operators-v8tcm" Feb 03 07:36:27 crc kubenswrapper[4708]: I0203 07:36:27.786134 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6x59\" (UniqueName: \"kubernetes.io/projected/96687e5d-529d-4edc-b3c4-72f5ed7b83f3-kube-api-access-f6x59\") pod \"redhat-operators-v8tcm\" (UID: \"96687e5d-529d-4edc-b3c4-72f5ed7b83f3\") " pod="openshift-marketplace/redhat-operators-v8tcm" Feb 03 07:36:27 crc kubenswrapper[4708]: I0203 07:36:27.786205 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96687e5d-529d-4edc-b3c4-72f5ed7b83f3-catalog-content\") pod \"redhat-operators-v8tcm\" (UID: \"96687e5d-529d-4edc-b3c4-72f5ed7b83f3\") " pod="openshift-marketplace/redhat-operators-v8tcm" Feb 03 07:36:27 crc kubenswrapper[4708]: I0203 07:36:27.887963 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96687e5d-529d-4edc-b3c4-72f5ed7b83f3-catalog-content\") pod \"redhat-operators-v8tcm\" (UID: \"96687e5d-529d-4edc-b3c4-72f5ed7b83f3\") " pod="openshift-marketplace/redhat-operators-v8tcm" Feb 03 07:36:27 crc kubenswrapper[4708]: I0203 07:36:27.888123 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96687e5d-529d-4edc-b3c4-72f5ed7b83f3-utilities\") pod \"redhat-operators-v8tcm\" (UID: \"96687e5d-529d-4edc-b3c4-72f5ed7b83f3\") " pod="openshift-marketplace/redhat-operators-v8tcm" Feb 03 07:36:27 crc kubenswrapper[4708]: I0203 07:36:27.888244 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6x59\" (UniqueName: \"kubernetes.io/projected/96687e5d-529d-4edc-b3c4-72f5ed7b83f3-kube-api-access-f6x59\") pod \"redhat-operators-v8tcm\" (UID: \"96687e5d-529d-4edc-b3c4-72f5ed7b83f3\") " pod="openshift-marketplace/redhat-operators-v8tcm" Feb 03 07:36:27 crc kubenswrapper[4708]: I0203 07:36:27.888632 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96687e5d-529d-4edc-b3c4-72f5ed7b83f3-catalog-content\") pod \"redhat-operators-v8tcm\" (UID: \"96687e5d-529d-4edc-b3c4-72f5ed7b83f3\") " pod="openshift-marketplace/redhat-operators-v8tcm" Feb 03 07:36:27 crc kubenswrapper[4708]: I0203 07:36:27.888652 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96687e5d-529d-4edc-b3c4-72f5ed7b83f3-utilities\") pod \"redhat-operators-v8tcm\" (UID: 
\"96687e5d-529d-4edc-b3c4-72f5ed7b83f3\") " pod="openshift-marketplace/redhat-operators-v8tcm" Feb 03 07:36:27 crc kubenswrapper[4708]: I0203 07:36:27.923573 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6x59\" (UniqueName: \"kubernetes.io/projected/96687e5d-529d-4edc-b3c4-72f5ed7b83f3-kube-api-access-f6x59\") pod \"redhat-operators-v8tcm\" (UID: \"96687e5d-529d-4edc-b3c4-72f5ed7b83f3\") " pod="openshift-marketplace/redhat-operators-v8tcm" Feb 03 07:36:27 crc kubenswrapper[4708]: I0203 07:36:27.933375 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-n9dmh" podUID="2aa296b0-7fc9-43b8-a149-269ff34dfef8" containerName="registry-server" containerID="cri-o://ba87d5159de6cb38c0190c8c30e9abdbf63e6279594783781df524af50bba727" gracePeriod=2 Feb 03 07:36:28 crc kubenswrapper[4708]: I0203 07:36:28.009287 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v8tcm" Feb 03 07:36:28 crc kubenswrapper[4708]: I0203 07:36:28.594184 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n9dmh" Feb 03 07:36:28 crc kubenswrapper[4708]: I0203 07:36:28.687081 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-v8tcm"] Feb 03 07:36:28 crc kubenswrapper[4708]: I0203 07:36:28.713731 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2aa296b0-7fc9-43b8-a149-269ff34dfef8-utilities\") pod \"2aa296b0-7fc9-43b8-a149-269ff34dfef8\" (UID: \"2aa296b0-7fc9-43b8-a149-269ff34dfef8\") " Feb 03 07:36:28 crc kubenswrapper[4708]: I0203 07:36:28.714310 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p8ddv\" (UniqueName: \"kubernetes.io/projected/2aa296b0-7fc9-43b8-a149-269ff34dfef8-kube-api-access-p8ddv\") pod \"2aa296b0-7fc9-43b8-a149-269ff34dfef8\" (UID: \"2aa296b0-7fc9-43b8-a149-269ff34dfef8\") " Feb 03 07:36:28 crc kubenswrapper[4708]: I0203 07:36:28.714439 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2aa296b0-7fc9-43b8-a149-269ff34dfef8-catalog-content\") pod \"2aa296b0-7fc9-43b8-a149-269ff34dfef8\" (UID: \"2aa296b0-7fc9-43b8-a149-269ff34dfef8\") " Feb 03 07:36:28 crc kubenswrapper[4708]: I0203 07:36:28.714842 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2aa296b0-7fc9-43b8-a149-269ff34dfef8-utilities" (OuterVolumeSpecName: "utilities") pod "2aa296b0-7fc9-43b8-a149-269ff34dfef8" (UID: "2aa296b0-7fc9-43b8-a149-269ff34dfef8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:36:28 crc kubenswrapper[4708]: I0203 07:36:28.715534 4708 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2aa296b0-7fc9-43b8-a149-269ff34dfef8-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:36:28 crc kubenswrapper[4708]: I0203 07:36:28.723978 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2aa296b0-7fc9-43b8-a149-269ff34dfef8-kube-api-access-p8ddv" (OuterVolumeSpecName: "kube-api-access-p8ddv") pod "2aa296b0-7fc9-43b8-a149-269ff34dfef8" (UID: "2aa296b0-7fc9-43b8-a149-269ff34dfef8"). 
InnerVolumeSpecName "kube-api-access-p8ddv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:36:28 crc kubenswrapper[4708]: I0203 07:36:28.787188 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2aa296b0-7fc9-43b8-a149-269ff34dfef8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2aa296b0-7fc9-43b8-a149-269ff34dfef8" (UID: "2aa296b0-7fc9-43b8-a149-269ff34dfef8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:36:28 crc kubenswrapper[4708]: I0203 07:36:28.817469 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p8ddv\" (UniqueName: \"kubernetes.io/projected/2aa296b0-7fc9-43b8-a149-269ff34dfef8-kube-api-access-p8ddv\") on node \"crc\" DevicePath \"\"" Feb 03 07:36:28 crc kubenswrapper[4708]: I0203 07:36:28.817508 4708 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2aa296b0-7fc9-43b8-a149-269ff34dfef8-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:36:28 crc kubenswrapper[4708]: I0203 07:36:28.945020 4708 generic.go:334] "Generic (PLEG): container finished" podID="2aa296b0-7fc9-43b8-a149-269ff34dfef8" containerID="ba87d5159de6cb38c0190c8c30e9abdbf63e6279594783781df524af50bba727" exitCode=0 Feb 03 07:36:28 crc kubenswrapper[4708]: I0203 07:36:28.945091 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n9dmh" Feb 03 07:36:28 crc kubenswrapper[4708]: I0203 07:36:28.945105 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9dmh" event={"ID":"2aa296b0-7fc9-43b8-a149-269ff34dfef8","Type":"ContainerDied","Data":"ba87d5159de6cb38c0190c8c30e9abdbf63e6279594783781df524af50bba727"} Feb 03 07:36:28 crc kubenswrapper[4708]: I0203 07:36:28.945138 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9dmh" event={"ID":"2aa296b0-7fc9-43b8-a149-269ff34dfef8","Type":"ContainerDied","Data":"8f747766fe5f08046e0742f5885434b8f87502a4ef9590f3430e5b03d5810d3a"} Feb 03 07:36:28 crc kubenswrapper[4708]: I0203 07:36:28.945164 4708 scope.go:117] "RemoveContainer" containerID="ba87d5159de6cb38c0190c8c30e9abdbf63e6279594783781df524af50bba727" Feb 03 07:36:28 crc kubenswrapper[4708]: I0203 07:36:28.962083 4708 generic.go:334] "Generic (PLEG): container finished" podID="96687e5d-529d-4edc-b3c4-72f5ed7b83f3" containerID="672b2a006a12b61effc74126fa40c2f9974caf79f419a0db2d7a4a096a96d1a4" exitCode=0 Feb 03 07:36:28 crc kubenswrapper[4708]: I0203 07:36:28.962127 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v8tcm" event={"ID":"96687e5d-529d-4edc-b3c4-72f5ed7b83f3","Type":"ContainerDied","Data":"672b2a006a12b61effc74126fa40c2f9974caf79f419a0db2d7a4a096a96d1a4"} Feb 03 07:36:28 crc kubenswrapper[4708]: I0203 07:36:28.962153 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v8tcm" event={"ID":"96687e5d-529d-4edc-b3c4-72f5ed7b83f3","Type":"ContainerStarted","Data":"2298d470bc49644a92a02ad01f3cbfcb7fff9586193a33cceb1866304612ce8b"} Feb 03 07:36:29 crc kubenswrapper[4708]: I0203 07:36:29.038606 4708 scope.go:117] "RemoveContainer" containerID="bdff98f3588b268356fcc8b8492313a7a1def1d45db7f2e2a6ad017f4fb963bb" Feb 03 07:36:29 crc kubenswrapper[4708]: I0203 07:36:29.044180 4708 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openshift-marketplace/community-operators-n9dmh"] Feb 03 07:36:29 crc kubenswrapper[4708]: I0203 07:36:29.055595 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-n9dmh"] Feb 03 07:36:29 crc kubenswrapper[4708]: I0203 07:36:29.074638 4708 scope.go:117] "RemoveContainer" containerID="191ab5fa7f8a2a7a4313d50f8a51d9b17b81a084131cc236e82ed1c5381305ab" Feb 03 07:36:29 crc kubenswrapper[4708]: I0203 07:36:29.114549 4708 scope.go:117] "RemoveContainer" containerID="ba87d5159de6cb38c0190c8c30e9abdbf63e6279594783781df524af50bba727" Feb 03 07:36:29 crc kubenswrapper[4708]: E0203 07:36:29.115044 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba87d5159de6cb38c0190c8c30e9abdbf63e6279594783781df524af50bba727\": container with ID starting with ba87d5159de6cb38c0190c8c30e9abdbf63e6279594783781df524af50bba727 not found: ID does not exist" containerID="ba87d5159de6cb38c0190c8c30e9abdbf63e6279594783781df524af50bba727" Feb 03 07:36:29 crc kubenswrapper[4708]: I0203 07:36:29.115080 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba87d5159de6cb38c0190c8c30e9abdbf63e6279594783781df524af50bba727"} err="failed to get container status \"ba87d5159de6cb38c0190c8c30e9abdbf63e6279594783781df524af50bba727\": rpc error: code = NotFound desc = could not find container \"ba87d5159de6cb38c0190c8c30e9abdbf63e6279594783781df524af50bba727\": container with ID starting with ba87d5159de6cb38c0190c8c30e9abdbf63e6279594783781df524af50bba727 not found: ID does not exist" Feb 03 07:36:29 crc kubenswrapper[4708]: I0203 07:36:29.115109 4708 scope.go:117] "RemoveContainer" containerID="bdff98f3588b268356fcc8b8492313a7a1def1d45db7f2e2a6ad017f4fb963bb" Feb 03 07:36:29 crc kubenswrapper[4708]: E0203 07:36:29.115522 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bdff98f3588b268356fcc8b8492313a7a1def1d45db7f2e2a6ad017f4fb963bb\": container with ID starting with bdff98f3588b268356fcc8b8492313a7a1def1d45db7f2e2a6ad017f4fb963bb not found: ID does not exist" containerID="bdff98f3588b268356fcc8b8492313a7a1def1d45db7f2e2a6ad017f4fb963bb" Feb 03 07:36:29 crc kubenswrapper[4708]: I0203 07:36:29.115562 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bdff98f3588b268356fcc8b8492313a7a1def1d45db7f2e2a6ad017f4fb963bb"} err="failed to get container status \"bdff98f3588b268356fcc8b8492313a7a1def1d45db7f2e2a6ad017f4fb963bb\": rpc error: code = NotFound desc = could not find container \"bdff98f3588b268356fcc8b8492313a7a1def1d45db7f2e2a6ad017f4fb963bb\": container with ID starting with bdff98f3588b268356fcc8b8492313a7a1def1d45db7f2e2a6ad017f4fb963bb not found: ID does not exist" Feb 03 07:36:29 crc kubenswrapper[4708]: I0203 07:36:29.115586 4708 scope.go:117] "RemoveContainer" containerID="191ab5fa7f8a2a7a4313d50f8a51d9b17b81a084131cc236e82ed1c5381305ab" Feb 03 07:36:29 crc kubenswrapper[4708]: E0203 07:36:29.116147 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"191ab5fa7f8a2a7a4313d50f8a51d9b17b81a084131cc236e82ed1c5381305ab\": container with ID starting with 191ab5fa7f8a2a7a4313d50f8a51d9b17b81a084131cc236e82ed1c5381305ab not found: ID does not exist" containerID="191ab5fa7f8a2a7a4313d50f8a51d9b17b81a084131cc236e82ed1c5381305ab" Feb 03 
07:36:29 crc kubenswrapper[4708]: I0203 07:36:29.116167 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"191ab5fa7f8a2a7a4313d50f8a51d9b17b81a084131cc236e82ed1c5381305ab"} err="failed to get container status \"191ab5fa7f8a2a7a4313d50f8a51d9b17b81a084131cc236e82ed1c5381305ab\": rpc error: code = NotFound desc = could not find container \"191ab5fa7f8a2a7a4313d50f8a51d9b17b81a084131cc236e82ed1c5381305ab\": container with ID starting with 191ab5fa7f8a2a7a4313d50f8a51d9b17b81a084131cc236e82ed1c5381305ab not found: ID does not exist" Feb 03 07:36:29 crc kubenswrapper[4708]: I0203 07:36:29.871036 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6h9dx" Feb 03 07:36:29 crc kubenswrapper[4708]: I0203 07:36:29.975558 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v8tcm" event={"ID":"96687e5d-529d-4edc-b3c4-72f5ed7b83f3","Type":"ContainerStarted","Data":"d189d1a9648c458a91b942af528d63896b3dd49a0d18df3713b38081ebe30299"} Feb 03 07:36:30 crc kubenswrapper[4708]: I0203 07:36:30.104153 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2aa296b0-7fc9-43b8-a149-269ff34dfef8" path="/var/lib/kubelet/pods/2aa296b0-7fc9-43b8-a149-269ff34dfef8/volumes" Feb 03 07:36:30 crc kubenswrapper[4708]: I0203 07:36:30.672861 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6sv5c"] Feb 03 07:36:30 crc kubenswrapper[4708]: E0203 07:36:30.675431 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2aa296b0-7fc9-43b8-a149-269ff34dfef8" containerName="extract-utilities" Feb 03 07:36:30 crc kubenswrapper[4708]: I0203 07:36:30.675486 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="2aa296b0-7fc9-43b8-a149-269ff34dfef8" containerName="extract-utilities" Feb 03 07:36:30 crc kubenswrapper[4708]: E0203 07:36:30.675537 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2aa296b0-7fc9-43b8-a149-269ff34dfef8" containerName="registry-server" Feb 03 07:36:30 crc kubenswrapper[4708]: I0203 07:36:30.675548 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="2aa296b0-7fc9-43b8-a149-269ff34dfef8" containerName="registry-server" Feb 03 07:36:30 crc kubenswrapper[4708]: E0203 07:36:30.675606 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2aa296b0-7fc9-43b8-a149-269ff34dfef8" containerName="extract-content" Feb 03 07:36:30 crc kubenswrapper[4708]: I0203 07:36:30.675614 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="2aa296b0-7fc9-43b8-a149-269ff34dfef8" containerName="extract-content" Feb 03 07:36:30 crc kubenswrapper[4708]: I0203 07:36:30.676378 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="2aa296b0-7fc9-43b8-a149-269ff34dfef8" containerName="registry-server" Feb 03 07:36:30 crc kubenswrapper[4708]: I0203 07:36:30.685489 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6sv5c" Feb 03 07:36:30 crc kubenswrapper[4708]: I0203 07:36:30.686509 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6sv5c"] Feb 03 07:36:30 crc kubenswrapper[4708]: I0203 07:36:30.879407 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7864s\" (UniqueName: \"kubernetes.io/projected/1816e893-f8eb-4102-b0a5-c5043e8d109f-kube-api-access-7864s\") pod \"redhat-marketplace-6sv5c\" (UID: \"1816e893-f8eb-4102-b0a5-c5043e8d109f\") " pod="openshift-marketplace/redhat-marketplace-6sv5c" Feb 03 07:36:30 crc kubenswrapper[4708]: I0203 07:36:30.879842 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1816e893-f8eb-4102-b0a5-c5043e8d109f-catalog-content\") pod \"redhat-marketplace-6sv5c\" (UID: \"1816e893-f8eb-4102-b0a5-c5043e8d109f\") " pod="openshift-marketplace/redhat-marketplace-6sv5c" Feb 03 07:36:30 crc kubenswrapper[4708]: I0203 07:36:30.879949 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1816e893-f8eb-4102-b0a5-c5043e8d109f-utilities\") pod \"redhat-marketplace-6sv5c\" (UID: \"1816e893-f8eb-4102-b0a5-c5043e8d109f\") " pod="openshift-marketplace/redhat-marketplace-6sv5c" Feb 03 07:36:30 crc kubenswrapper[4708]: I0203 07:36:30.982313 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7864s\" (UniqueName: \"kubernetes.io/projected/1816e893-f8eb-4102-b0a5-c5043e8d109f-kube-api-access-7864s\") pod \"redhat-marketplace-6sv5c\" (UID: \"1816e893-f8eb-4102-b0a5-c5043e8d109f\") " pod="openshift-marketplace/redhat-marketplace-6sv5c" Feb 03 07:36:30 crc kubenswrapper[4708]: I0203 07:36:30.982397 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1816e893-f8eb-4102-b0a5-c5043e8d109f-catalog-content\") pod \"redhat-marketplace-6sv5c\" (UID: \"1816e893-f8eb-4102-b0a5-c5043e8d109f\") " pod="openshift-marketplace/redhat-marketplace-6sv5c" Feb 03 07:36:30 crc kubenswrapper[4708]: I0203 07:36:30.982496 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1816e893-f8eb-4102-b0a5-c5043e8d109f-utilities\") pod \"redhat-marketplace-6sv5c\" (UID: \"1816e893-f8eb-4102-b0a5-c5043e8d109f\") " pod="openshift-marketplace/redhat-marketplace-6sv5c" Feb 03 07:36:30 crc kubenswrapper[4708]: I0203 07:36:30.983246 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1816e893-f8eb-4102-b0a5-c5043e8d109f-catalog-content\") pod \"redhat-marketplace-6sv5c\" (UID: \"1816e893-f8eb-4102-b0a5-c5043e8d109f\") " pod="openshift-marketplace/redhat-marketplace-6sv5c" Feb 03 07:36:30 crc kubenswrapper[4708]: I0203 07:36:30.983340 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1816e893-f8eb-4102-b0a5-c5043e8d109f-utilities\") pod \"redhat-marketplace-6sv5c\" (UID: \"1816e893-f8eb-4102-b0a5-c5043e8d109f\") " pod="openshift-marketplace/redhat-marketplace-6sv5c" Feb 03 07:36:31 crc kubenswrapper[4708]: I0203 07:36:31.003752 4708 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-7864s\" (UniqueName: \"kubernetes.io/projected/1816e893-f8eb-4102-b0a5-c5043e8d109f-kube-api-access-7864s\") pod \"redhat-marketplace-6sv5c\" (UID: \"1816e893-f8eb-4102-b0a5-c5043e8d109f\") " pod="openshift-marketplace/redhat-marketplace-6sv5c" Feb 03 07:36:31 crc kubenswrapper[4708]: I0203 07:36:31.044082 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6sv5c" Feb 03 07:36:31 crc kubenswrapper[4708]: I0203 07:36:31.598880 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6sv5c"] Feb 03 07:36:31 crc kubenswrapper[4708]: I0203 07:36:31.994790 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6sv5c" event={"ID":"1816e893-f8eb-4102-b0a5-c5043e8d109f","Type":"ContainerStarted","Data":"c379f074d66023e4ac21418a2251dfdfb03090e1291a2f7b31d058aab70247d2"} Feb 03 07:36:31 crc kubenswrapper[4708]: I0203 07:36:31.995160 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6sv5c" event={"ID":"1816e893-f8eb-4102-b0a5-c5043e8d109f","Type":"ContainerStarted","Data":"a1d266bf3c5e4320f14fe7dd7acd042e7713579ef9bd0345271a93addefc6c55"} Feb 03 07:36:33 crc kubenswrapper[4708]: I0203 07:36:33.005850 4708 generic.go:334] "Generic (PLEG): container finished" podID="96687e5d-529d-4edc-b3c4-72f5ed7b83f3" containerID="d189d1a9648c458a91b942af528d63896b3dd49a0d18df3713b38081ebe30299" exitCode=0 Feb 03 07:36:33 crc kubenswrapper[4708]: I0203 07:36:33.005917 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v8tcm" event={"ID":"96687e5d-529d-4edc-b3c4-72f5ed7b83f3","Type":"ContainerDied","Data":"d189d1a9648c458a91b942af528d63896b3dd49a0d18df3713b38081ebe30299"} Feb 03 07:36:33 crc kubenswrapper[4708]: I0203 07:36:33.010198 4708 generic.go:334] "Generic (PLEG): container finished" podID="1816e893-f8eb-4102-b0a5-c5043e8d109f" containerID="c379f074d66023e4ac21418a2251dfdfb03090e1291a2f7b31d058aab70247d2" exitCode=0 Feb 03 07:36:33 crc kubenswrapper[4708]: I0203 07:36:33.010237 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6sv5c" event={"ID":"1816e893-f8eb-4102-b0a5-c5043e8d109f","Type":"ContainerDied","Data":"c379f074d66023e4ac21418a2251dfdfb03090e1291a2f7b31d058aab70247d2"} Feb 03 07:36:33 crc kubenswrapper[4708]: I0203 07:36:33.214322 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6h9dx"] Feb 03 07:36:33 crc kubenswrapper[4708]: I0203 07:36:33.214682 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6h9dx" podUID="6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b" containerName="registry-server" containerID="cri-o://56419b322574c658acf611512aab384eed66e233cae82a0cfd9ef0df056e8d36" gracePeriod=2 Feb 03 07:36:33 crc kubenswrapper[4708]: I0203 07:36:33.847341 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6h9dx" Feb 03 07:36:33 crc kubenswrapper[4708]: I0203 07:36:33.945749 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b-catalog-content\") pod \"6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b\" (UID: \"6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b\") " Feb 03 07:36:33 crc kubenswrapper[4708]: I0203 07:36:33.945902 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sc7mx\" (UniqueName: \"kubernetes.io/projected/6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b-kube-api-access-sc7mx\") pod \"6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b\" (UID: \"6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b\") " Feb 03 07:36:33 crc kubenswrapper[4708]: I0203 07:36:33.945949 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b-utilities\") pod \"6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b\" (UID: \"6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b\") " Feb 03 07:36:33 crc kubenswrapper[4708]: I0203 07:36:33.946781 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b-utilities" (OuterVolumeSpecName: "utilities") pod "6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b" (UID: "6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:36:33 crc kubenswrapper[4708]: I0203 07:36:33.955364 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b-kube-api-access-sc7mx" (OuterVolumeSpecName: "kube-api-access-sc7mx") pod "6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b" (UID: "6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b"). InnerVolumeSpecName "kube-api-access-sc7mx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:36:34 crc kubenswrapper[4708]: I0203 07:36:34.049825 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b" (UID: "6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:36:34 crc kubenswrapper[4708]: I0203 07:36:34.051844 4708 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:36:34 crc kubenswrapper[4708]: I0203 07:36:34.051881 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sc7mx\" (UniqueName: \"kubernetes.io/projected/6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b-kube-api-access-sc7mx\") on node \"crc\" DevicePath \"\"" Feb 03 07:36:34 crc kubenswrapper[4708]: I0203 07:36:34.051898 4708 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:36:34 crc kubenswrapper[4708]: I0203 07:36:34.062197 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v8tcm" event={"ID":"96687e5d-529d-4edc-b3c4-72f5ed7b83f3","Type":"ContainerStarted","Data":"22e38094c0632b69f6762909f6b90e4edfb5d41ecca6cc5bf86e5ee1a6e19c28"} Feb 03 07:36:34 crc kubenswrapper[4708]: I0203 07:36:34.076103 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6sv5c" event={"ID":"1816e893-f8eb-4102-b0a5-c5043e8d109f","Type":"ContainerStarted","Data":"944c2a463e88ddb4e1b8ff86f10fc75630a7eff0698a83e7bc5c6a6f23e848cd"} Feb 03 07:36:34 crc kubenswrapper[4708]: I0203 07:36:34.083855 4708 generic.go:334] "Generic (PLEG): container finished" podID="6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b" containerID="56419b322574c658acf611512aab384eed66e233cae82a0cfd9ef0df056e8d36" exitCode=0 Feb 03 07:36:34 crc kubenswrapper[4708]: I0203 07:36:34.083918 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6h9dx" event={"ID":"6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b","Type":"ContainerDied","Data":"56419b322574c658acf611512aab384eed66e233cae82a0cfd9ef0df056e8d36"} Feb 03 07:36:34 crc kubenswrapper[4708]: I0203 07:36:34.083955 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6h9dx" event={"ID":"6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b","Type":"ContainerDied","Data":"33c0806ead1046da4993bbab77fc8b54c6c5cb2cc554299222d8d97c436927db"} Feb 03 07:36:34 crc kubenswrapper[4708]: I0203 07:36:34.083978 4708 scope.go:117] "RemoveContainer" containerID="56419b322574c658acf611512aab384eed66e233cae82a0cfd9ef0df056e8d36" Feb 03 07:36:34 crc kubenswrapper[4708]: I0203 07:36:34.084211 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6h9dx" Feb 03 07:36:34 crc kubenswrapper[4708]: I0203 07:36:34.112851 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-v8tcm" podStartSLOduration=2.419617642 podStartE2EDuration="7.112814911s" podCreationTimestamp="2026-02-03 07:36:27 +0000 UTC" firstStartedPulling="2026-02-03 07:36:28.964266497 +0000 UTC m=+1567.946213304" lastFinishedPulling="2026-02-03 07:36:33.657463736 +0000 UTC m=+1572.639410573" observedRunningTime="2026-02-03 07:36:34.086939717 +0000 UTC m=+1573.068886534" watchObservedRunningTime="2026-02-03 07:36:34.112814911 +0000 UTC m=+1573.094761718" Feb 03 07:36:34 crc kubenswrapper[4708]: I0203 07:36:34.144564 4708 scope.go:117] "RemoveContainer" containerID="230b37499ab5687b9f4f0b71db911c510ee3db821f86157a98dde7643f297609" Feb 03 07:36:34 crc kubenswrapper[4708]: I0203 07:36:34.167933 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6h9dx"] Feb 03 07:36:34 crc kubenswrapper[4708]: I0203 07:36:34.176672 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6h9dx"] Feb 03 07:36:34 crc kubenswrapper[4708]: I0203 07:36:34.228429 4708 scope.go:117] "RemoveContainer" containerID="45d454105c83f98885e2a58d3220b6b88f2990f10612c306313cf31ea0b222f0" Feb 03 07:36:34 crc kubenswrapper[4708]: I0203 07:36:34.251748 4708 scope.go:117] "RemoveContainer" containerID="56419b322574c658acf611512aab384eed66e233cae82a0cfd9ef0df056e8d36" Feb 03 07:36:34 crc kubenswrapper[4708]: E0203 07:36:34.252182 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56419b322574c658acf611512aab384eed66e233cae82a0cfd9ef0df056e8d36\": container with ID starting with 56419b322574c658acf611512aab384eed66e233cae82a0cfd9ef0df056e8d36 not found: ID does not exist" containerID="56419b322574c658acf611512aab384eed66e233cae82a0cfd9ef0df056e8d36" Feb 03 07:36:34 crc kubenswrapper[4708]: I0203 07:36:34.252223 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56419b322574c658acf611512aab384eed66e233cae82a0cfd9ef0df056e8d36"} err="failed to get container status \"56419b322574c658acf611512aab384eed66e233cae82a0cfd9ef0df056e8d36\": rpc error: code = NotFound desc = could not find container \"56419b322574c658acf611512aab384eed66e233cae82a0cfd9ef0df056e8d36\": container with ID starting with 56419b322574c658acf611512aab384eed66e233cae82a0cfd9ef0df056e8d36 not found: ID does not exist" Feb 03 07:36:34 crc kubenswrapper[4708]: I0203 07:36:34.252250 4708 scope.go:117] "RemoveContainer" containerID="230b37499ab5687b9f4f0b71db911c510ee3db821f86157a98dde7643f297609" Feb 03 07:36:34 crc kubenswrapper[4708]: E0203 07:36:34.252494 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"230b37499ab5687b9f4f0b71db911c510ee3db821f86157a98dde7643f297609\": container with ID starting with 230b37499ab5687b9f4f0b71db911c510ee3db821f86157a98dde7643f297609 not found: ID does not exist" containerID="230b37499ab5687b9f4f0b71db911c510ee3db821f86157a98dde7643f297609" Feb 03 07:36:34 crc kubenswrapper[4708]: I0203 07:36:34.252527 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"230b37499ab5687b9f4f0b71db911c510ee3db821f86157a98dde7643f297609"} err="failed to get 
container status \"230b37499ab5687b9f4f0b71db911c510ee3db821f86157a98dde7643f297609\": rpc error: code = NotFound desc = could not find container \"230b37499ab5687b9f4f0b71db911c510ee3db821f86157a98dde7643f297609\": container with ID starting with 230b37499ab5687b9f4f0b71db911c510ee3db821f86157a98dde7643f297609 not found: ID does not exist" Feb 03 07:36:34 crc kubenswrapper[4708]: I0203 07:36:34.252577 4708 scope.go:117] "RemoveContainer" containerID="45d454105c83f98885e2a58d3220b6b88f2990f10612c306313cf31ea0b222f0" Feb 03 07:36:34 crc kubenswrapper[4708]: E0203 07:36:34.252901 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"45d454105c83f98885e2a58d3220b6b88f2990f10612c306313cf31ea0b222f0\": container with ID starting with 45d454105c83f98885e2a58d3220b6b88f2990f10612c306313cf31ea0b222f0 not found: ID does not exist" containerID="45d454105c83f98885e2a58d3220b6b88f2990f10612c306313cf31ea0b222f0" Feb 03 07:36:34 crc kubenswrapper[4708]: I0203 07:36:34.252962 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45d454105c83f98885e2a58d3220b6b88f2990f10612c306313cf31ea0b222f0"} err="failed to get container status \"45d454105c83f98885e2a58d3220b6b88f2990f10612c306313cf31ea0b222f0\": rpc error: code = NotFound desc = could not find container \"45d454105c83f98885e2a58d3220b6b88f2990f10612c306313cf31ea0b222f0\": container with ID starting with 45d454105c83f98885e2a58d3220b6b88f2990f10612c306313cf31ea0b222f0 not found: ID does not exist" Feb 03 07:36:35 crc kubenswrapper[4708]: I0203 07:36:35.096462 4708 generic.go:334] "Generic (PLEG): container finished" podID="1816e893-f8eb-4102-b0a5-c5043e8d109f" containerID="944c2a463e88ddb4e1b8ff86f10fc75630a7eff0698a83e7bc5c6a6f23e848cd" exitCode=0 Feb 03 07:36:35 crc kubenswrapper[4708]: I0203 07:36:35.096516 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6sv5c" event={"ID":"1816e893-f8eb-4102-b0a5-c5043e8d109f","Type":"ContainerDied","Data":"944c2a463e88ddb4e1b8ff86f10fc75630a7eff0698a83e7bc5c6a6f23e848cd"} Feb 03 07:36:36 crc kubenswrapper[4708]: I0203 07:36:36.104052 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b" path="/var/lib/kubelet/pods/6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b/volumes" Feb 03 07:36:36 crc kubenswrapper[4708]: I0203 07:36:36.108890 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6sv5c" event={"ID":"1816e893-f8eb-4102-b0a5-c5043e8d109f","Type":"ContainerStarted","Data":"ae4305262188d8308f50e57b0f638dc7c991f6ad974c1583fff6d8b01c1bec43"} Feb 03 07:36:36 crc kubenswrapper[4708]: I0203 07:36:36.146121 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6sv5c" podStartSLOduration=3.650498927 podStartE2EDuration="6.146097539s" podCreationTimestamp="2026-02-03 07:36:30 +0000 UTC" firstStartedPulling="2026-02-03 07:36:33.013002356 +0000 UTC m=+1571.994949163" lastFinishedPulling="2026-02-03 07:36:35.508600968 +0000 UTC m=+1574.490547775" observedRunningTime="2026-02-03 07:36:36.130829626 +0000 UTC m=+1575.112776433" watchObservedRunningTime="2026-02-03 07:36:36.146097539 +0000 UTC m=+1575.128044346" Feb 03 07:36:38 crc kubenswrapper[4708]: I0203 07:36:38.010256 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-v8tcm" Feb 03 
07:36:38 crc kubenswrapper[4708]: I0203 07:36:38.010622 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-v8tcm" Feb 03 07:36:39 crc kubenswrapper[4708]: I0203 07:36:39.065847 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-v8tcm" podUID="96687e5d-529d-4edc-b3c4-72f5ed7b83f3" containerName="registry-server" probeResult="failure" output=< Feb 03 07:36:39 crc kubenswrapper[4708]: timeout: failed to connect service ":50051" within 1s Feb 03 07:36:39 crc kubenswrapper[4708]: > Feb 03 07:36:41 crc kubenswrapper[4708]: I0203 07:36:41.044258 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6sv5c" Feb 03 07:36:41 crc kubenswrapper[4708]: I0203 07:36:41.045820 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6sv5c" Feb 03 07:36:41 crc kubenswrapper[4708]: I0203 07:36:41.116634 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6sv5c" Feb 03 07:36:41 crc kubenswrapper[4708]: I0203 07:36:41.243542 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6sv5c" Feb 03 07:36:41 crc kubenswrapper[4708]: I0203 07:36:41.361016 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6sv5c"] Feb 03 07:36:43 crc kubenswrapper[4708]: I0203 07:36:43.204252 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-6sv5c" podUID="1816e893-f8eb-4102-b0a5-c5043e8d109f" containerName="registry-server" containerID="cri-o://ae4305262188d8308f50e57b0f638dc7c991f6ad974c1583fff6d8b01c1bec43" gracePeriod=2 Feb 03 07:36:43 crc kubenswrapper[4708]: I0203 07:36:43.789394 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6sv5c" Feb 03 07:36:43 crc kubenswrapper[4708]: I0203 07:36:43.942761 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7864s\" (UniqueName: \"kubernetes.io/projected/1816e893-f8eb-4102-b0a5-c5043e8d109f-kube-api-access-7864s\") pod \"1816e893-f8eb-4102-b0a5-c5043e8d109f\" (UID: \"1816e893-f8eb-4102-b0a5-c5043e8d109f\") " Feb 03 07:36:43 crc kubenswrapper[4708]: I0203 07:36:43.942910 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1816e893-f8eb-4102-b0a5-c5043e8d109f-utilities\") pod \"1816e893-f8eb-4102-b0a5-c5043e8d109f\" (UID: \"1816e893-f8eb-4102-b0a5-c5043e8d109f\") " Feb 03 07:36:43 crc kubenswrapper[4708]: I0203 07:36:43.943069 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1816e893-f8eb-4102-b0a5-c5043e8d109f-catalog-content\") pod \"1816e893-f8eb-4102-b0a5-c5043e8d109f\" (UID: \"1816e893-f8eb-4102-b0a5-c5043e8d109f\") " Feb 03 07:36:43 crc kubenswrapper[4708]: I0203 07:36:43.944034 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1816e893-f8eb-4102-b0a5-c5043e8d109f-utilities" (OuterVolumeSpecName: "utilities") pod "1816e893-f8eb-4102-b0a5-c5043e8d109f" (UID: "1816e893-f8eb-4102-b0a5-c5043e8d109f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:36:43 crc kubenswrapper[4708]: I0203 07:36:43.951849 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1816e893-f8eb-4102-b0a5-c5043e8d109f-kube-api-access-7864s" (OuterVolumeSpecName: "kube-api-access-7864s") pod "1816e893-f8eb-4102-b0a5-c5043e8d109f" (UID: "1816e893-f8eb-4102-b0a5-c5043e8d109f"). InnerVolumeSpecName "kube-api-access-7864s". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:36:43 crc kubenswrapper[4708]: I0203 07:36:43.971557 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1816e893-f8eb-4102-b0a5-c5043e8d109f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1816e893-f8eb-4102-b0a5-c5043e8d109f" (UID: "1816e893-f8eb-4102-b0a5-c5043e8d109f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:36:44 crc kubenswrapper[4708]: I0203 07:36:44.045648 4708 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1816e893-f8eb-4102-b0a5-c5043e8d109f-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:36:44 crc kubenswrapper[4708]: I0203 07:36:44.045685 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7864s\" (UniqueName: \"kubernetes.io/projected/1816e893-f8eb-4102-b0a5-c5043e8d109f-kube-api-access-7864s\") on node \"crc\" DevicePath \"\"" Feb 03 07:36:44 crc kubenswrapper[4708]: I0203 07:36:44.045697 4708 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1816e893-f8eb-4102-b0a5-c5043e8d109f-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:36:44 crc kubenswrapper[4708]: I0203 07:36:44.214523 4708 generic.go:334] "Generic (PLEG): container finished" podID="1816e893-f8eb-4102-b0a5-c5043e8d109f" containerID="ae4305262188d8308f50e57b0f638dc7c991f6ad974c1583fff6d8b01c1bec43" exitCode=0 Feb 03 07:36:44 crc kubenswrapper[4708]: I0203 07:36:44.214568 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6sv5c" event={"ID":"1816e893-f8eb-4102-b0a5-c5043e8d109f","Type":"ContainerDied","Data":"ae4305262188d8308f50e57b0f638dc7c991f6ad974c1583fff6d8b01c1bec43"} Feb 03 07:36:44 crc kubenswrapper[4708]: I0203 07:36:44.214597 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6sv5c" event={"ID":"1816e893-f8eb-4102-b0a5-c5043e8d109f","Type":"ContainerDied","Data":"a1d266bf3c5e4320f14fe7dd7acd042e7713579ef9bd0345271a93addefc6c55"} Feb 03 07:36:44 crc kubenswrapper[4708]: I0203 07:36:44.214618 4708 scope.go:117] "RemoveContainer" containerID="ae4305262188d8308f50e57b0f638dc7c991f6ad974c1583fff6d8b01c1bec43" Feb 03 07:36:44 crc kubenswrapper[4708]: I0203 07:36:44.216438 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6sv5c" Feb 03 07:36:44 crc kubenswrapper[4708]: I0203 07:36:44.240442 4708 scope.go:117] "RemoveContainer" containerID="944c2a463e88ddb4e1b8ff86f10fc75630a7eff0698a83e7bc5c6a6f23e848cd" Feb 03 07:36:44 crc kubenswrapper[4708]: I0203 07:36:44.242193 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6sv5c"] Feb 03 07:36:44 crc kubenswrapper[4708]: I0203 07:36:44.254163 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-6sv5c"] Feb 03 07:36:44 crc kubenswrapper[4708]: I0203 07:36:44.262329 4708 scope.go:117] "RemoveContainer" containerID="c379f074d66023e4ac21418a2251dfdfb03090e1291a2f7b31d058aab70247d2" Feb 03 07:36:44 crc kubenswrapper[4708]: I0203 07:36:44.310927 4708 scope.go:117] "RemoveContainer" containerID="ae4305262188d8308f50e57b0f638dc7c991f6ad974c1583fff6d8b01c1bec43" Feb 03 07:36:44 crc kubenswrapper[4708]: E0203 07:36:44.311601 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae4305262188d8308f50e57b0f638dc7c991f6ad974c1583fff6d8b01c1bec43\": container with ID starting with ae4305262188d8308f50e57b0f638dc7c991f6ad974c1583fff6d8b01c1bec43 not found: ID does not exist" containerID="ae4305262188d8308f50e57b0f638dc7c991f6ad974c1583fff6d8b01c1bec43" Feb 03 07:36:44 crc kubenswrapper[4708]: I0203 07:36:44.311848 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae4305262188d8308f50e57b0f638dc7c991f6ad974c1583fff6d8b01c1bec43"} err="failed to get container status \"ae4305262188d8308f50e57b0f638dc7c991f6ad974c1583fff6d8b01c1bec43\": rpc error: code = NotFound desc = could not find container \"ae4305262188d8308f50e57b0f638dc7c991f6ad974c1583fff6d8b01c1bec43\": container with ID starting with ae4305262188d8308f50e57b0f638dc7c991f6ad974c1583fff6d8b01c1bec43 not found: ID does not exist" Feb 03 07:36:44 crc kubenswrapper[4708]: I0203 07:36:44.311964 4708 scope.go:117] "RemoveContainer" containerID="944c2a463e88ddb4e1b8ff86f10fc75630a7eff0698a83e7bc5c6a6f23e848cd" Feb 03 07:36:44 crc kubenswrapper[4708]: E0203 07:36:44.312231 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"944c2a463e88ddb4e1b8ff86f10fc75630a7eff0698a83e7bc5c6a6f23e848cd\": container with ID starting with 944c2a463e88ddb4e1b8ff86f10fc75630a7eff0698a83e7bc5c6a6f23e848cd not found: ID does not exist" containerID="944c2a463e88ddb4e1b8ff86f10fc75630a7eff0698a83e7bc5c6a6f23e848cd" Feb 03 07:36:44 crc kubenswrapper[4708]: I0203 07:36:44.312303 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"944c2a463e88ddb4e1b8ff86f10fc75630a7eff0698a83e7bc5c6a6f23e848cd"} err="failed to get container status \"944c2a463e88ddb4e1b8ff86f10fc75630a7eff0698a83e7bc5c6a6f23e848cd\": rpc error: code = NotFound desc = could not find container \"944c2a463e88ddb4e1b8ff86f10fc75630a7eff0698a83e7bc5c6a6f23e848cd\": container with ID starting with 944c2a463e88ddb4e1b8ff86f10fc75630a7eff0698a83e7bc5c6a6f23e848cd not found: ID does not exist" Feb 03 07:36:44 crc kubenswrapper[4708]: I0203 07:36:44.312365 4708 scope.go:117] "RemoveContainer" containerID="c379f074d66023e4ac21418a2251dfdfb03090e1291a2f7b31d058aab70247d2" Feb 03 07:36:44 crc kubenswrapper[4708]: E0203 07:36:44.312783 4708 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"c379f074d66023e4ac21418a2251dfdfb03090e1291a2f7b31d058aab70247d2\": container with ID starting with c379f074d66023e4ac21418a2251dfdfb03090e1291a2f7b31d058aab70247d2 not found: ID does not exist" containerID="c379f074d66023e4ac21418a2251dfdfb03090e1291a2f7b31d058aab70247d2" Feb 03 07:36:44 crc kubenswrapper[4708]: I0203 07:36:44.313091 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c379f074d66023e4ac21418a2251dfdfb03090e1291a2f7b31d058aab70247d2"} err="failed to get container status \"c379f074d66023e4ac21418a2251dfdfb03090e1291a2f7b31d058aab70247d2\": rpc error: code = NotFound desc = could not find container \"c379f074d66023e4ac21418a2251dfdfb03090e1291a2f7b31d058aab70247d2\": container with ID starting with c379f074d66023e4ac21418a2251dfdfb03090e1291a2f7b31d058aab70247d2 not found: ID does not exist" Feb 03 07:36:46 crc kubenswrapper[4708]: I0203 07:36:46.105779 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1816e893-f8eb-4102-b0a5-c5043e8d109f" path="/var/lib/kubelet/pods/1816e893-f8eb-4102-b0a5-c5043e8d109f/volumes" Feb 03 07:36:46 crc kubenswrapper[4708]: I0203 07:36:46.239861 4708 scope.go:117] "RemoveContainer" containerID="05ba5b153a55cc9afb5a266092ec64c2ba5e7619492672973c37f7232fed0dea" Feb 03 07:36:46 crc kubenswrapper[4708]: I0203 07:36:46.372784 4708 scope.go:117] "RemoveContainer" containerID="d7000c44f31f7d3fd85e8c61aaa9b8fb0676c3100fb54c041b6997124ba8499d" Feb 03 07:36:46 crc kubenswrapper[4708]: I0203 07:36:46.716043 4708 scope.go:117] "RemoveContainer" containerID="d3e3f4f752245ffe166a35e679f85b8e9c0ba5f8fc2490175cf70632d2a70448" Feb 03 07:36:48 crc kubenswrapper[4708]: I0203 07:36:48.088526 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-v8tcm" Feb 03 07:36:48 crc kubenswrapper[4708]: I0203 07:36:48.143600 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-v8tcm" Feb 03 07:36:48 crc kubenswrapper[4708]: I0203 07:36:48.757154 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-v8tcm"] Feb 03 07:36:49 crc kubenswrapper[4708]: I0203 07:36:49.263981 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-v8tcm" podUID="96687e5d-529d-4edc-b3c4-72f5ed7b83f3" containerName="registry-server" containerID="cri-o://22e38094c0632b69f6762909f6b90e4edfb5d41ecca6cc5bf86e5ee1a6e19c28" gracePeriod=2 Feb 03 07:36:49 crc kubenswrapper[4708]: I0203 07:36:49.767402 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-v8tcm" Feb 03 07:36:49 crc kubenswrapper[4708]: I0203 07:36:49.884978 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f6x59\" (UniqueName: \"kubernetes.io/projected/96687e5d-529d-4edc-b3c4-72f5ed7b83f3-kube-api-access-f6x59\") pod \"96687e5d-529d-4edc-b3c4-72f5ed7b83f3\" (UID: \"96687e5d-529d-4edc-b3c4-72f5ed7b83f3\") " Feb 03 07:36:49 crc kubenswrapper[4708]: I0203 07:36:49.885217 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96687e5d-529d-4edc-b3c4-72f5ed7b83f3-catalog-content\") pod \"96687e5d-529d-4edc-b3c4-72f5ed7b83f3\" (UID: \"96687e5d-529d-4edc-b3c4-72f5ed7b83f3\") " Feb 03 07:36:49 crc kubenswrapper[4708]: I0203 07:36:49.885255 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96687e5d-529d-4edc-b3c4-72f5ed7b83f3-utilities\") pod \"96687e5d-529d-4edc-b3c4-72f5ed7b83f3\" (UID: \"96687e5d-529d-4edc-b3c4-72f5ed7b83f3\") " Feb 03 07:36:49 crc kubenswrapper[4708]: I0203 07:36:49.885811 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/96687e5d-529d-4edc-b3c4-72f5ed7b83f3-utilities" (OuterVolumeSpecName: "utilities") pod "96687e5d-529d-4edc-b3c4-72f5ed7b83f3" (UID: "96687e5d-529d-4edc-b3c4-72f5ed7b83f3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:36:49 crc kubenswrapper[4708]: I0203 07:36:49.892157 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96687e5d-529d-4edc-b3c4-72f5ed7b83f3-kube-api-access-f6x59" (OuterVolumeSpecName: "kube-api-access-f6x59") pod "96687e5d-529d-4edc-b3c4-72f5ed7b83f3" (UID: "96687e5d-529d-4edc-b3c4-72f5ed7b83f3"). InnerVolumeSpecName "kube-api-access-f6x59". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:36:49 crc kubenswrapper[4708]: I0203 07:36:49.987020 4708 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96687e5d-529d-4edc-b3c4-72f5ed7b83f3-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:36:49 crc kubenswrapper[4708]: I0203 07:36:49.987066 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f6x59\" (UniqueName: \"kubernetes.io/projected/96687e5d-529d-4edc-b3c4-72f5ed7b83f3-kube-api-access-f6x59\") on node \"crc\" DevicePath \"\"" Feb 03 07:36:50 crc kubenswrapper[4708]: I0203 07:36:50.018031 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/96687e5d-529d-4edc-b3c4-72f5ed7b83f3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "96687e5d-529d-4edc-b3c4-72f5ed7b83f3" (UID: "96687e5d-529d-4edc-b3c4-72f5ed7b83f3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:36:50 crc kubenswrapper[4708]: I0203 07:36:50.094175 4708 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96687e5d-529d-4edc-b3c4-72f5ed7b83f3-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:36:50 crc kubenswrapper[4708]: I0203 07:36:50.275688 4708 generic.go:334] "Generic (PLEG): container finished" podID="96687e5d-529d-4edc-b3c4-72f5ed7b83f3" containerID="22e38094c0632b69f6762909f6b90e4edfb5d41ecca6cc5bf86e5ee1a6e19c28" exitCode=0 Feb 03 07:36:50 crc kubenswrapper[4708]: I0203 07:36:50.275726 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v8tcm" event={"ID":"96687e5d-529d-4edc-b3c4-72f5ed7b83f3","Type":"ContainerDied","Data":"22e38094c0632b69f6762909f6b90e4edfb5d41ecca6cc5bf86e5ee1a6e19c28"} Feb 03 07:36:50 crc kubenswrapper[4708]: I0203 07:36:50.276934 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v8tcm" event={"ID":"96687e5d-529d-4edc-b3c4-72f5ed7b83f3","Type":"ContainerDied","Data":"2298d470bc49644a92a02ad01f3cbfcb7fff9586193a33cceb1866304612ce8b"} Feb 03 07:36:50 crc kubenswrapper[4708]: I0203 07:36:50.277005 4708 scope.go:117] "RemoveContainer" containerID="22e38094c0632b69f6762909f6b90e4edfb5d41ecca6cc5bf86e5ee1a6e19c28" Feb 03 07:36:50 crc kubenswrapper[4708]: I0203 07:36:50.275762 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v8tcm" Feb 03 07:36:50 crc kubenswrapper[4708]: I0203 07:36:50.299960 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-v8tcm"] Feb 03 07:36:50 crc kubenswrapper[4708]: I0203 07:36:50.305067 4708 scope.go:117] "RemoveContainer" containerID="d189d1a9648c458a91b942af528d63896b3dd49a0d18df3713b38081ebe30299" Feb 03 07:36:50 crc kubenswrapper[4708]: I0203 07:36:50.309934 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-v8tcm"] Feb 03 07:36:50 crc kubenswrapper[4708]: I0203 07:36:50.330062 4708 scope.go:117] "RemoveContainer" containerID="672b2a006a12b61effc74126fa40c2f9974caf79f419a0db2d7a4a096a96d1a4" Feb 03 07:36:50 crc kubenswrapper[4708]: I0203 07:36:50.372954 4708 scope.go:117] "RemoveContainer" containerID="22e38094c0632b69f6762909f6b90e4edfb5d41ecca6cc5bf86e5ee1a6e19c28" Feb 03 07:36:50 crc kubenswrapper[4708]: E0203 07:36:50.373762 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22e38094c0632b69f6762909f6b90e4edfb5d41ecca6cc5bf86e5ee1a6e19c28\": container with ID starting with 22e38094c0632b69f6762909f6b90e4edfb5d41ecca6cc5bf86e5ee1a6e19c28 not found: ID does not exist" containerID="22e38094c0632b69f6762909f6b90e4edfb5d41ecca6cc5bf86e5ee1a6e19c28" Feb 03 07:36:50 crc kubenswrapper[4708]: I0203 07:36:50.373816 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22e38094c0632b69f6762909f6b90e4edfb5d41ecca6cc5bf86e5ee1a6e19c28"} err="failed to get container status \"22e38094c0632b69f6762909f6b90e4edfb5d41ecca6cc5bf86e5ee1a6e19c28\": rpc error: code = NotFound desc = could not find container \"22e38094c0632b69f6762909f6b90e4edfb5d41ecca6cc5bf86e5ee1a6e19c28\": container with ID starting with 22e38094c0632b69f6762909f6b90e4edfb5d41ecca6cc5bf86e5ee1a6e19c28 not found: ID does not exist" Feb 03 07:36:50 crc 
kubenswrapper[4708]: I0203 07:36:50.373842 4708 scope.go:117] "RemoveContainer" containerID="d189d1a9648c458a91b942af528d63896b3dd49a0d18df3713b38081ebe30299" Feb 03 07:36:50 crc kubenswrapper[4708]: E0203 07:36:50.374118 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d189d1a9648c458a91b942af528d63896b3dd49a0d18df3713b38081ebe30299\": container with ID starting with d189d1a9648c458a91b942af528d63896b3dd49a0d18df3713b38081ebe30299 not found: ID does not exist" containerID="d189d1a9648c458a91b942af528d63896b3dd49a0d18df3713b38081ebe30299" Feb 03 07:36:50 crc kubenswrapper[4708]: I0203 07:36:50.374156 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d189d1a9648c458a91b942af528d63896b3dd49a0d18df3713b38081ebe30299"} err="failed to get container status \"d189d1a9648c458a91b942af528d63896b3dd49a0d18df3713b38081ebe30299\": rpc error: code = NotFound desc = could not find container \"d189d1a9648c458a91b942af528d63896b3dd49a0d18df3713b38081ebe30299\": container with ID starting with d189d1a9648c458a91b942af528d63896b3dd49a0d18df3713b38081ebe30299 not found: ID does not exist" Feb 03 07:36:50 crc kubenswrapper[4708]: I0203 07:36:50.374183 4708 scope.go:117] "RemoveContainer" containerID="672b2a006a12b61effc74126fa40c2f9974caf79f419a0db2d7a4a096a96d1a4" Feb 03 07:36:50 crc kubenswrapper[4708]: E0203 07:36:50.374409 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"672b2a006a12b61effc74126fa40c2f9974caf79f419a0db2d7a4a096a96d1a4\": container with ID starting with 672b2a006a12b61effc74126fa40c2f9974caf79f419a0db2d7a4a096a96d1a4 not found: ID does not exist" containerID="672b2a006a12b61effc74126fa40c2f9974caf79f419a0db2d7a4a096a96d1a4" Feb 03 07:36:50 crc kubenswrapper[4708]: I0203 07:36:50.374430 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"672b2a006a12b61effc74126fa40c2f9974caf79f419a0db2d7a4a096a96d1a4"} err="failed to get container status \"672b2a006a12b61effc74126fa40c2f9974caf79f419a0db2d7a4a096a96d1a4\": rpc error: code = NotFound desc = could not find container \"672b2a006a12b61effc74126fa40c2f9974caf79f419a0db2d7a4a096a96d1a4\": container with ID starting with 672b2a006a12b61effc74126fa40c2f9974caf79f419a0db2d7a4a096a96d1a4 not found: ID does not exist" Feb 03 07:36:52 crc kubenswrapper[4708]: I0203 07:36:52.106586 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96687e5d-529d-4edc-b3c4-72f5ed7b83f3" path="/var/lib/kubelet/pods/96687e5d-529d-4edc-b3c4-72f5ed7b83f3/volumes" Feb 03 07:37:09 crc kubenswrapper[4708]: I0203 07:37:09.049229 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-487ld"] Feb 03 07:37:09 crc kubenswrapper[4708]: I0203 07:37:09.064904 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-9889-account-create-update-xfq6v"] Feb 03 07:37:09 crc kubenswrapper[4708]: I0203 07:37:09.074173 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-9889-account-create-update-xfq6v"] Feb 03 07:37:09 crc kubenswrapper[4708]: I0203 07:37:09.082561 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-487ld"] Feb 03 07:37:10 crc kubenswrapper[4708]: I0203 07:37:10.104975 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ecd1be3-40af-41af-bbc6-78a346f02c44" 
path="/var/lib/kubelet/pods/7ecd1be3-40af-41af-bbc6-78a346f02c44/volumes" Feb 03 07:37:10 crc kubenswrapper[4708]: I0203 07:37:10.105671 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="acb52368-cf93-4c82-926a-665f665ed84a" path="/var/lib/kubelet/pods/acb52368-cf93-4c82-926a-665f665ed84a/volumes" Feb 03 07:37:13 crc kubenswrapper[4708]: I0203 07:37:13.048233 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-5956-account-create-update-tvxq4"] Feb 03 07:37:13 crc kubenswrapper[4708]: I0203 07:37:13.060158 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-fjkqx"] Feb 03 07:37:13 crc kubenswrapper[4708]: I0203 07:37:13.069427 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-vsnx7"] Feb 03 07:37:13 crc kubenswrapper[4708]: I0203 07:37:13.079470 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-5956-account-create-update-tvxq4"] Feb 03 07:37:13 crc kubenswrapper[4708]: I0203 07:37:13.091833 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-vsnx7"] Feb 03 07:37:13 crc kubenswrapper[4708]: I0203 07:37:13.101787 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-fjkqx"] Feb 03 07:37:14 crc kubenswrapper[4708]: I0203 07:37:14.105615 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="028be7ae-713a-49b6-9f35-930e0016d066" path="/var/lib/kubelet/pods/028be7ae-713a-49b6-9f35-930e0016d066/volumes" Feb 03 07:37:14 crc kubenswrapper[4708]: I0203 07:37:14.106407 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2" path="/var/lib/kubelet/pods/43bdfaf6-c8f1-4705-b450-c8fd7e7cc2e2/volumes" Feb 03 07:37:14 crc kubenswrapper[4708]: I0203 07:37:14.107154 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c944aa48-f235-469e-8513-106dab7e315a" path="/var/lib/kubelet/pods/c944aa48-f235-469e-8513-106dab7e315a/volumes" Feb 03 07:37:15 crc kubenswrapper[4708]: I0203 07:37:15.032744 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-07b1-account-create-update-xsnff"] Feb 03 07:37:15 crc kubenswrapper[4708]: I0203 07:37:15.070663 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-07b1-account-create-update-xsnff"] Feb 03 07:37:16 crc kubenswrapper[4708]: I0203 07:37:16.107876 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d163e5d-5089-42c1-b1d5-12960d6da873" path="/var/lib/kubelet/pods/4d163e5d-5089-42c1-b1d5-12960d6da873/volumes" Feb 03 07:37:23 crc kubenswrapper[4708]: I0203 07:37:23.833360 4708 patch_prober.go:28] interesting pod/machine-config-daemon-r94bn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:37:23 crc kubenswrapper[4708]: I0203 07:37:23.833993 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:37:27 crc kubenswrapper[4708]: I0203 07:37:27.047492 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/neutron-dbf8-account-create-update-gxd7n"] Feb 03 07:37:27 crc kubenswrapper[4708]: I0203 07:37:27.057017 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-0d95-account-create-update-jbjg4"] Feb 03 07:37:27 crc kubenswrapper[4708]: I0203 07:37:27.074164 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-45fjr"] Feb 03 07:37:27 crc kubenswrapper[4708]: I0203 07:37:27.080490 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-a9e3-account-create-update-hf4t4"] Feb 03 07:37:27 crc kubenswrapper[4708]: I0203 07:37:27.088585 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-5knks"] Feb 03 07:37:27 crc kubenswrapper[4708]: I0203 07:37:27.096833 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-kk829"] Feb 03 07:37:27 crc kubenswrapper[4708]: I0203 07:37:27.106909 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-5knks"] Feb 03 07:37:27 crc kubenswrapper[4708]: I0203 07:37:27.117026 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-kk829"] Feb 03 07:37:27 crc kubenswrapper[4708]: I0203 07:37:27.125755 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-0d95-account-create-update-jbjg4"] Feb 03 07:37:27 crc kubenswrapper[4708]: I0203 07:37:27.135282 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-45fjr"] Feb 03 07:37:27 crc kubenswrapper[4708]: I0203 07:37:27.144760 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-a9e3-account-create-update-hf4t4"] Feb 03 07:37:27 crc kubenswrapper[4708]: I0203 07:37:27.155556 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-dbf8-account-create-update-gxd7n"] Feb 03 07:37:28 crc kubenswrapper[4708]: I0203 07:37:28.106852 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01fcd20e-f6ef-4ecc-b29a-98b053efae92" path="/var/lib/kubelet/pods/01fcd20e-f6ef-4ecc-b29a-98b053efae92/volumes" Feb 03 07:37:28 crc kubenswrapper[4708]: I0203 07:37:28.107931 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="063192c5-ad89-4dde-bad6-af78e8bf8459" path="/var/lib/kubelet/pods/063192c5-ad89-4dde-bad6-af78e8bf8459/volumes" Feb 03 07:37:28 crc kubenswrapper[4708]: I0203 07:37:28.109493 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e8f947f-87bc-4215-8c50-2409fb2b274f" path="/var/lib/kubelet/pods/0e8f947f-87bc-4215-8c50-2409fb2b274f/volumes" Feb 03 07:37:28 crc kubenswrapper[4708]: I0203 07:37:28.110313 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26e5e627-4533-4800-bd50-826271c5dbef" path="/var/lib/kubelet/pods/26e5e627-4533-4800-bd50-826271c5dbef/volumes" Feb 03 07:37:28 crc kubenswrapper[4708]: I0203 07:37:28.112034 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7be3ad58-dba8-419b-85e7-52ca0ae0fe3a" path="/var/lib/kubelet/pods/7be3ad58-dba8-419b-85e7-52ca0ae0fe3a/volumes" Feb 03 07:37:28 crc kubenswrapper[4708]: I0203 07:37:28.112762 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f83726be-8013-490c-92c3-f19b0a04c112" path="/var/lib/kubelet/pods/f83726be-8013-490c-92c3-f19b0a04c112/volumes" Feb 03 07:37:32 crc kubenswrapper[4708]: I0203 07:37:32.033698 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/root-account-create-update-g8kn6"] Feb 03 07:37:32 crc kubenswrapper[4708]: I0203 07:37:32.043645 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-g8kn6"] Feb 03 07:37:32 crc kubenswrapper[4708]: I0203 07:37:32.110975 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df850a16-e0b1-4d87-913a-a30a7f2365be" path="/var/lib/kubelet/pods/df850a16-e0b1-4d87-913a-a30a7f2365be/volumes" Feb 03 07:37:36 crc kubenswrapper[4708]: I0203 07:37:36.035423 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-sgtsg"] Feb 03 07:37:36 crc kubenswrapper[4708]: I0203 07:37:36.047276 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-sgtsg"] Feb 03 07:37:36 crc kubenswrapper[4708]: I0203 07:37:36.102608 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="adc80637-973b-4bd0-b444-4d2d41e23b8b" path="/var/lib/kubelet/pods/adc80637-973b-4bd0-b444-4d2d41e23b8b/volumes" Feb 03 07:37:46 crc kubenswrapper[4708]: I0203 07:37:46.867983 4708 scope.go:117] "RemoveContainer" containerID="c635e016c7bce6032bf15498513c8a4b8b4614d153fdd74bacf749f8217b5492" Feb 03 07:37:46 crc kubenswrapper[4708]: I0203 07:37:46.912924 4708 scope.go:117] "RemoveContainer" containerID="68a58edb6f1952eabb4ec048fa9ded7a670e63db09807e91c7899377931c953c" Feb 03 07:37:46 crc kubenswrapper[4708]: I0203 07:37:46.959810 4708 scope.go:117] "RemoveContainer" containerID="119c7c1f02fcb1f981f75b1abd410e597d9e6a7350b703293d44655b690fbd2d" Feb 03 07:37:46 crc kubenswrapper[4708]: I0203 07:37:46.992085 4708 scope.go:117] "RemoveContainer" containerID="601d94218f3648bfef7b7b2fe2467ec80482bffa67a539703c2f12fcbde7952e" Feb 03 07:37:47 crc kubenswrapper[4708]: I0203 07:37:47.111528 4708 scope.go:117] "RemoveContainer" containerID="2fb83885277b3edaa15fad24d83a064545239de33c1848376dfa7456d2ace340" Feb 03 07:37:47 crc kubenswrapper[4708]: I0203 07:37:47.142125 4708 scope.go:117] "RemoveContainer" containerID="5e3636632c55cf3a594c28ee75b7ad6ad332bcfd0943b701a3b8371981e85f2f" Feb 03 07:37:47 crc kubenswrapper[4708]: I0203 07:37:47.185758 4708 scope.go:117] "RemoveContainer" containerID="05e10050ba3569a7493fc63f945ecba165124bda1a45dd50c95665efe2c2bb19" Feb 03 07:37:47 crc kubenswrapper[4708]: I0203 07:37:47.207285 4708 scope.go:117] "RemoveContainer" containerID="ad977a123df838fec12ca9019254b0cc66a69bf9a2abf58c4a0ddb3bc6a611f7" Feb 03 07:37:47 crc kubenswrapper[4708]: I0203 07:37:47.230669 4708 scope.go:117] "RemoveContainer" containerID="f38feecda0106269e9d4981e28167e1c1768115c78013f575c511383f10dcc17" Feb 03 07:37:47 crc kubenswrapper[4708]: I0203 07:37:47.253162 4708 scope.go:117] "RemoveContainer" containerID="8a23b5de7c532ce6a09b7cc55992a91a57b4f43569b5894c9fe647c58f23d7be" Feb 03 07:37:47 crc kubenswrapper[4708]: I0203 07:37:47.274058 4708 scope.go:117] "RemoveContainer" containerID="f50f4bfa6774b43edeab9cfda1955788a567fb833ab63946760f5f6a8937ada0" Feb 03 07:37:47 crc kubenswrapper[4708]: I0203 07:37:47.303658 4708 scope.go:117] "RemoveContainer" containerID="a4a7ee48486be6c0d8cb255566132c50e9692ed4d7ad09537dc3ba10be929495" Feb 03 07:37:47 crc kubenswrapper[4708]: I0203 07:37:47.345887 4708 scope.go:117] "RemoveContainer" containerID="ab1eaab86bc10e9ddd04d713b0eec8f2a943dfde93409e9ace600168426d7e6b" Feb 03 07:37:47 crc kubenswrapper[4708]: I0203 07:37:47.368511 4708 scope.go:117] "RemoveContainer" containerID="ccb5854ab49b90b36207a116c2ced1149c17ac39facd9a9186817db24360ab5d" Feb 03 
07:37:49 crc kubenswrapper[4708]: I0203 07:37:49.028604 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ironic-db-create-ktbjr"] Feb 03 07:37:49 crc kubenswrapper[4708]: I0203 07:37:49.036425 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ironic-bbf1-account-create-update-5htsg"] Feb 03 07:37:49 crc kubenswrapper[4708]: I0203 07:37:49.053068 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ironic-db-create-ktbjr"] Feb 03 07:37:49 crc kubenswrapper[4708]: I0203 07:37:49.066033 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ironic-bbf1-account-create-update-5htsg"] Feb 03 07:37:50 crc kubenswrapper[4708]: I0203 07:37:50.104014 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c0d871a-3a87-4651-8392-e69acf628940" path="/var/lib/kubelet/pods/4c0d871a-3a87-4651-8392-e69acf628940/volumes" Feb 03 07:37:50 crc kubenswrapper[4708]: I0203 07:37:50.105145 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efea8ba9-1f4b-4a03-aafd-8388de2f8bc0" path="/var/lib/kubelet/pods/efea8ba9-1f4b-4a03-aafd-8388de2f8bc0/volumes" Feb 03 07:37:53 crc kubenswrapper[4708]: I0203 07:37:53.832921 4708 patch_prober.go:28] interesting pod/machine-config-daemon-r94bn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:37:53 crc kubenswrapper[4708]: I0203 07:37:53.833407 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:37:54 crc kubenswrapper[4708]: I0203 07:37:54.033115 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-t95nr"] Feb 03 07:37:54 crc kubenswrapper[4708]: I0203 07:37:54.044167 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-t95nr"] Feb 03 07:37:54 crc kubenswrapper[4708]: I0203 07:37:54.104225 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6201bbf3-523c-4a64-9703-fb0adbc0955a" path="/var/lib/kubelet/pods/6201bbf3-523c-4a64-9703-fb0adbc0955a/volumes" Feb 03 07:38:12 crc kubenswrapper[4708]: I0203 07:38:12.044105 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-blhdl"] Feb 03 07:38:12 crc kubenswrapper[4708]: I0203 07:38:12.055984 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-blhdl"] Feb 03 07:38:12 crc kubenswrapper[4708]: I0203 07:38:12.103847 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6b4400d-8fe0-4b9a-985b-5e7854dcd78d" path="/var/lib/kubelet/pods/f6b4400d-8fe0-4b9a-985b-5e7854dcd78d/volumes" Feb 03 07:38:13 crc kubenswrapper[4708]: I0203 07:38:13.030326 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-cl2c7"] Feb 03 07:38:13 crc kubenswrapper[4708]: I0203 07:38:13.040412 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-cl2c7"] Feb 03 07:38:14 crc kubenswrapper[4708]: I0203 07:38:14.104969 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29" 
path="/var/lib/kubelet/pods/ef682cb8-b7ce-4b5c-8d85-4844c7ca0c29/volumes" Feb 03 07:38:21 crc kubenswrapper[4708]: I0203 07:38:21.039583 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-2smzn"] Feb 03 07:38:21 crc kubenswrapper[4708]: I0203 07:38:21.052548 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-cxbc2"] Feb 03 07:38:21 crc kubenswrapper[4708]: I0203 07:38:21.066267 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-cxbc2"] Feb 03 07:38:21 crc kubenswrapper[4708]: I0203 07:38:21.079405 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-2smzn"] Feb 03 07:38:22 crc kubenswrapper[4708]: I0203 07:38:22.104960 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a3122ca-fa36-4033-a44f-36d12d0b3f4a" path="/var/lib/kubelet/pods/1a3122ca-fa36-4033-a44f-36d12d0b3f4a/volumes" Feb 03 07:38:22 crc kubenswrapper[4708]: I0203 07:38:22.106078 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dcaf1aa4-0bde-49a7-a027-140450f08736" path="/var/lib/kubelet/pods/dcaf1aa4-0bde-49a7-a027-140450f08736/volumes" Feb 03 07:38:23 crc kubenswrapper[4708]: I0203 07:38:23.833621 4708 patch_prober.go:28] interesting pod/machine-config-daemon-r94bn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:38:23 crc kubenswrapper[4708]: I0203 07:38:23.834062 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:38:23 crc kubenswrapper[4708]: I0203 07:38:23.834124 4708 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" Feb 03 07:38:23 crc kubenswrapper[4708]: I0203 07:38:23.835172 4708 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5"} pod="openshift-machine-config-operator/machine-config-daemon-r94bn" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 03 07:38:23 crc kubenswrapper[4708]: I0203 07:38:23.835248 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" containerID="cri-o://1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5" gracePeriod=600 Feb 03 07:38:23 crc kubenswrapper[4708]: E0203 07:38:23.969323 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:38:24 crc kubenswrapper[4708]: I0203 07:38:24.174589 4708 
generic.go:334] "Generic (PLEG): container finished" podID="67498414-5132-496e-9638-189f5941ace0" containerID="1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5" exitCode=0 Feb 03 07:38:24 crc kubenswrapper[4708]: I0203 07:38:24.174654 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" event={"ID":"67498414-5132-496e-9638-189f5941ace0","Type":"ContainerDied","Data":"1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5"} Feb 03 07:38:24 crc kubenswrapper[4708]: I0203 07:38:24.175176 4708 scope.go:117] "RemoveContainer" containerID="be6dd4d0258e6d5acc381dbf52f783da9b3b29ef844719ae2478040c809eed5d" Feb 03 07:38:24 crc kubenswrapper[4708]: I0203 07:38:24.175924 4708 scope.go:117] "RemoveContainer" containerID="1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5" Feb 03 07:38:24 crc kubenswrapper[4708]: E0203 07:38:24.176230 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:38:33 crc kubenswrapper[4708]: I0203 07:38:33.342759 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-zh6nf/must-gather-lhfm4"] Feb 03 07:38:33 crc kubenswrapper[4708]: E0203 07:38:33.343602 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1816e893-f8eb-4102-b0a5-c5043e8d109f" containerName="extract-utilities" Feb 03 07:38:33 crc kubenswrapper[4708]: I0203 07:38:33.343621 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="1816e893-f8eb-4102-b0a5-c5043e8d109f" containerName="extract-utilities" Feb 03 07:38:33 crc kubenswrapper[4708]: E0203 07:38:33.343637 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96687e5d-529d-4edc-b3c4-72f5ed7b83f3" containerName="extract-content" Feb 03 07:38:33 crc kubenswrapper[4708]: I0203 07:38:33.343646 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="96687e5d-529d-4edc-b3c4-72f5ed7b83f3" containerName="extract-content" Feb 03 07:38:33 crc kubenswrapper[4708]: E0203 07:38:33.343670 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96687e5d-529d-4edc-b3c4-72f5ed7b83f3" containerName="registry-server" Feb 03 07:38:33 crc kubenswrapper[4708]: I0203 07:38:33.343678 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="96687e5d-529d-4edc-b3c4-72f5ed7b83f3" containerName="registry-server" Feb 03 07:38:33 crc kubenswrapper[4708]: E0203 07:38:33.343699 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96687e5d-529d-4edc-b3c4-72f5ed7b83f3" containerName="extract-utilities" Feb 03 07:38:33 crc kubenswrapper[4708]: I0203 07:38:33.343707 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="96687e5d-529d-4edc-b3c4-72f5ed7b83f3" containerName="extract-utilities" Feb 03 07:38:33 crc kubenswrapper[4708]: E0203 07:38:33.343724 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b" containerName="extract-content" Feb 03 07:38:33 crc kubenswrapper[4708]: I0203 07:38:33.343732 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b" containerName="extract-content" Feb 03 07:38:33 crc 
kubenswrapper[4708]: E0203 07:38:33.343749 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1816e893-f8eb-4102-b0a5-c5043e8d109f" containerName="extract-content" Feb 03 07:38:33 crc kubenswrapper[4708]: I0203 07:38:33.343757 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="1816e893-f8eb-4102-b0a5-c5043e8d109f" containerName="extract-content" Feb 03 07:38:33 crc kubenswrapper[4708]: E0203 07:38:33.343769 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1816e893-f8eb-4102-b0a5-c5043e8d109f" containerName="registry-server" Feb 03 07:38:33 crc kubenswrapper[4708]: I0203 07:38:33.343776 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="1816e893-f8eb-4102-b0a5-c5043e8d109f" containerName="registry-server" Feb 03 07:38:33 crc kubenswrapper[4708]: E0203 07:38:33.343789 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b" containerName="registry-server" Feb 03 07:38:33 crc kubenswrapper[4708]: I0203 07:38:33.343816 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b" containerName="registry-server" Feb 03 07:38:33 crc kubenswrapper[4708]: E0203 07:38:33.343838 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b" containerName="extract-utilities" Feb 03 07:38:33 crc kubenswrapper[4708]: I0203 07:38:33.343846 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b" containerName="extract-utilities" Feb 03 07:38:33 crc kubenswrapper[4708]: I0203 07:38:33.344067 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cd2ac28-466c-41aa-9fe5-dbf4ac32f12b" containerName="registry-server" Feb 03 07:38:33 crc kubenswrapper[4708]: I0203 07:38:33.344092 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="96687e5d-529d-4edc-b3c4-72f5ed7b83f3" containerName="registry-server" Feb 03 07:38:33 crc kubenswrapper[4708]: I0203 07:38:33.344111 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="1816e893-f8eb-4102-b0a5-c5043e8d109f" containerName="registry-server" Feb 03 07:38:33 crc kubenswrapper[4708]: I0203 07:38:33.345400 4708 util.go:30] "No sandbox for pod can be found. 
Feb 03 07:38:33 crc kubenswrapper[4708]: I0203 07:38:33.347038 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-zh6nf"/"default-dockercfg-762f7"
Feb 03 07:38:33 crc kubenswrapper[4708]: I0203 07:38:33.347038 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-zh6nf"/"openshift-service-ca.crt"
Feb 03 07:38:33 crc kubenswrapper[4708]: I0203 07:38:33.350431 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-zh6nf"/"kube-root-ca.crt"
Feb 03 07:38:33 crc kubenswrapper[4708]: I0203 07:38:33.371866 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-zh6nf/must-gather-lhfm4"]
Feb 03 07:38:33 crc kubenswrapper[4708]: I0203 07:38:33.450647 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qt96g\" (UniqueName: \"kubernetes.io/projected/d02e34a2-8944-4d52-bfbf-2d5cccbd0435-kube-api-access-qt96g\") pod \"must-gather-lhfm4\" (UID: \"d02e34a2-8944-4d52-bfbf-2d5cccbd0435\") " pod="openshift-must-gather-zh6nf/must-gather-lhfm4"
Feb 03 07:38:33 crc kubenswrapper[4708]: I0203 07:38:33.450738 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/d02e34a2-8944-4d52-bfbf-2d5cccbd0435-must-gather-output\") pod \"must-gather-lhfm4\" (UID: \"d02e34a2-8944-4d52-bfbf-2d5cccbd0435\") " pod="openshift-must-gather-zh6nf/must-gather-lhfm4"
Feb 03 07:38:33 crc kubenswrapper[4708]: I0203 07:38:33.552115 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qt96g\" (UniqueName: \"kubernetes.io/projected/d02e34a2-8944-4d52-bfbf-2d5cccbd0435-kube-api-access-qt96g\") pod \"must-gather-lhfm4\" (UID: \"d02e34a2-8944-4d52-bfbf-2d5cccbd0435\") " pod="openshift-must-gather-zh6nf/must-gather-lhfm4"
Feb 03 07:38:33 crc kubenswrapper[4708]: I0203 07:38:33.552203 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/d02e34a2-8944-4d52-bfbf-2d5cccbd0435-must-gather-output\") pod \"must-gather-lhfm4\" (UID: \"d02e34a2-8944-4d52-bfbf-2d5cccbd0435\") " pod="openshift-must-gather-zh6nf/must-gather-lhfm4"
Feb 03 07:38:33 crc kubenswrapper[4708]: I0203 07:38:33.552737 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/d02e34a2-8944-4d52-bfbf-2d5cccbd0435-must-gather-output\") pod \"must-gather-lhfm4\" (UID: \"d02e34a2-8944-4d52-bfbf-2d5cccbd0435\") " pod="openshift-must-gather-zh6nf/must-gather-lhfm4"
Feb 03 07:38:33 crc kubenswrapper[4708]: I0203 07:38:33.573462 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qt96g\" (UniqueName: \"kubernetes.io/projected/d02e34a2-8944-4d52-bfbf-2d5cccbd0435-kube-api-access-qt96g\") pod \"must-gather-lhfm4\" (UID: \"d02e34a2-8944-4d52-bfbf-2d5cccbd0435\") " pod="openshift-must-gather-zh6nf/must-gather-lhfm4"
Feb 03 07:38:33 crc kubenswrapper[4708]: I0203 07:38:33.677170 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zh6nf/must-gather-lhfm4"
Feb 03 07:38:34 crc kubenswrapper[4708]: I0203 07:38:34.222680 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-zh6nf/must-gather-lhfm4"]
Feb 03 07:38:34 crc kubenswrapper[4708]: I0203 07:38:34.265017 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zh6nf/must-gather-lhfm4" event={"ID":"d02e34a2-8944-4d52-bfbf-2d5cccbd0435","Type":"ContainerStarted","Data":"648a783722c26baf97fe4b80b9b8e87abbbc4206fb0918724281b9ec2a53ec6e"}
Feb 03 07:38:35 crc kubenswrapper[4708]: I0203 07:38:35.094114 4708 scope.go:117] "RemoveContainer" containerID="1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5"
Feb 03 07:38:35 crc kubenswrapper[4708]: E0203 07:38:35.094687 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0"
Feb 03 07:38:39 crc kubenswrapper[4708]: I0203 07:38:39.314167 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zh6nf/must-gather-lhfm4" event={"ID":"d02e34a2-8944-4d52-bfbf-2d5cccbd0435","Type":"ContainerStarted","Data":"b737fc60629e7895f4ea0a2481cc8b40beab9e9ec242a4e946ea89e9cdab9581"}
Feb 03 07:38:39 crc kubenswrapper[4708]: I0203 07:38:39.314761 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zh6nf/must-gather-lhfm4" event={"ID":"d02e34a2-8944-4d52-bfbf-2d5cccbd0435","Type":"ContainerStarted","Data":"90036df92560178408d0662295a1ac57424b10217653b2f321c4b1cca385bcd1"}
Feb 03 07:38:39 crc kubenswrapper[4708]: I0203 07:38:39.335207 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-zh6nf/must-gather-lhfm4" podStartSLOduration=2.3477919910000002 podStartE2EDuration="6.33518945s" podCreationTimestamp="2026-02-03 07:38:33 +0000 UTC" firstStartedPulling="2026-02-03 07:38:34.237497295 +0000 UTC m=+1693.219444102" lastFinishedPulling="2026-02-03 07:38:38.224894754 +0000 UTC m=+1697.206841561" observedRunningTime="2026-02-03 07:38:39.335118829 +0000 UTC m=+1698.317065656" watchObservedRunningTime="2026-02-03 07:38:39.33518945 +0000 UTC m=+1698.317136257"
Feb 03 07:38:41 crc kubenswrapper[4708]: E0203 07:38:41.071952 4708 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 38.102.83.53:59658->38.102.83.53:41401: read tcp 38.102.83.53:59658->38.102.83.53:41401: read: connection reset by peer
Feb 03 07:38:42 crc kubenswrapper[4708]: I0203 07:38:42.050000 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-w75bv"]
Feb 03 07:38:42 crc kubenswrapper[4708]: I0203 07:38:42.060863 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-w75bv"]
Feb 03 07:38:42 crc kubenswrapper[4708]: I0203 07:38:42.069848 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-zh6nf/crc-debug-2x9n5"]
Feb 03 07:38:42 crc kubenswrapper[4708]: I0203 07:38:42.071043 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zh6nf/crc-debug-2x9n5"
Feb 03 07:38:42 crc kubenswrapper[4708]: I0203 07:38:42.106598 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fde8edd5-50e0-4bb0-8701-54e0998444a1" path="/var/lib/kubelet/pods/fde8edd5-50e0-4bb0-8701-54e0998444a1/volumes"
Feb 03 07:38:42 crc kubenswrapper[4708]: I0203 07:38:42.125357 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hd2p\" (UniqueName: \"kubernetes.io/projected/31a404b5-88e2-4a53-881d-cd50e03b84c0-kube-api-access-6hd2p\") pod \"crc-debug-2x9n5\" (UID: \"31a404b5-88e2-4a53-881d-cd50e03b84c0\") " pod="openshift-must-gather-zh6nf/crc-debug-2x9n5"
Feb 03 07:38:42 crc kubenswrapper[4708]: I0203 07:38:42.125453 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/31a404b5-88e2-4a53-881d-cd50e03b84c0-host\") pod \"crc-debug-2x9n5\" (UID: \"31a404b5-88e2-4a53-881d-cd50e03b84c0\") " pod="openshift-must-gather-zh6nf/crc-debug-2x9n5"
Feb 03 07:38:42 crc kubenswrapper[4708]: I0203 07:38:42.227022 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hd2p\" (UniqueName: \"kubernetes.io/projected/31a404b5-88e2-4a53-881d-cd50e03b84c0-kube-api-access-6hd2p\") pod \"crc-debug-2x9n5\" (UID: \"31a404b5-88e2-4a53-881d-cd50e03b84c0\") " pod="openshift-must-gather-zh6nf/crc-debug-2x9n5"
Feb 03 07:38:42 crc kubenswrapper[4708]: I0203 07:38:42.227134 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/31a404b5-88e2-4a53-881d-cd50e03b84c0-host\") pod \"crc-debug-2x9n5\" (UID: \"31a404b5-88e2-4a53-881d-cd50e03b84c0\") " pod="openshift-must-gather-zh6nf/crc-debug-2x9n5"
Feb 03 07:38:42 crc kubenswrapper[4708]: I0203 07:38:42.227244 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/31a404b5-88e2-4a53-881d-cd50e03b84c0-host\") pod \"crc-debug-2x9n5\" (UID: \"31a404b5-88e2-4a53-881d-cd50e03b84c0\") " pod="openshift-must-gather-zh6nf/crc-debug-2x9n5"
Feb 03 07:38:42 crc kubenswrapper[4708]: I0203 07:38:42.249591 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hd2p\" (UniqueName: \"kubernetes.io/projected/31a404b5-88e2-4a53-881d-cd50e03b84c0-kube-api-access-6hd2p\") pod \"crc-debug-2x9n5\" (UID: \"31a404b5-88e2-4a53-881d-cd50e03b84c0\") " pod="openshift-must-gather-zh6nf/crc-debug-2x9n5"
Feb 03 07:38:42 crc kubenswrapper[4708]: I0203 07:38:42.400112 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zh6nf/crc-debug-2x9n5"
Feb 03 07:38:43 crc kubenswrapper[4708]: I0203 07:38:43.346972 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zh6nf/crc-debug-2x9n5" event={"ID":"31a404b5-88e2-4a53-881d-cd50e03b84c0","Type":"ContainerStarted","Data":"08167a9a2da8d11bdbfafcb0f24ec9d420a1b014c7309612c9e78afa45e9f914"}
Feb 03 07:38:47 crc kubenswrapper[4708]: I0203 07:38:47.642857 4708 scope.go:117] "RemoveContainer" containerID="740a3fafcc5199dcb4e70c85e69211cf63edc3daa6ddc13939b116f97220af85"
Feb 03 07:38:47 crc kubenswrapper[4708]: I0203 07:38:47.717475 4708 scope.go:117] "RemoveContainer" containerID="bc6a69b873ed3d6240e916a34664035c89da8030345790479c4daa44494dccfa"
Feb 03 07:38:47 crc kubenswrapper[4708]: I0203 07:38:47.778707 4708 scope.go:117] "RemoveContainer" containerID="26d9c4a841e463329378ef7d36ba7eec3ebd9de2972c52e2177fb8f07b5b3163"
Feb 03 07:38:47 crc kubenswrapper[4708]: I0203 07:38:47.849713 4708 scope.go:117] "RemoveContainer" containerID="da8fa13fa14d8037f60141061494219bc255d8cbc09e1c7236ac2f6534d3b1d2"
Feb 03 07:38:47 crc kubenswrapper[4708]: I0203 07:38:47.905427 4708 scope.go:117] "RemoveContainer" containerID="8210fbdb49636d40b4ad1429001b753c58f8dcb8f8763c5be48d3501f7d51f32"
Feb 03 07:38:47 crc kubenswrapper[4708]: I0203 07:38:47.947905 4708 scope.go:117] "RemoveContainer" containerID="004112adfeeb812b356b4367486306021c116a343bd149394818843e76cea9ea"
Feb 03 07:38:49 crc kubenswrapper[4708]: I0203 07:38:49.093464 4708 scope.go:117] "RemoveContainer" containerID="1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5"
Feb 03 07:38:49 crc kubenswrapper[4708]: E0203 07:38:49.094007 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0"
Feb 03 07:38:50 crc kubenswrapper[4708]: I0203 07:38:50.041137 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ironic-inspector-e3db-account-create-update-cm9n8"]
Feb 03 07:38:50 crc kubenswrapper[4708]: I0203 07:38:50.054354 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ironic-inspector-db-create-xtlsr"]
Feb 03 07:38:50 crc kubenswrapper[4708]: I0203 07:38:50.062624 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ironic-inspector-e3db-account-create-update-cm9n8"]
Feb 03 07:38:50 crc kubenswrapper[4708]: I0203 07:38:50.073781 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ironic-inspector-db-create-xtlsr"]
Feb 03 07:38:50 crc kubenswrapper[4708]: I0203 07:38:50.107745 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5853c69c-a64c-449e-ab86-1fcb400ddc60" path="/var/lib/kubelet/pods/5853c69c-a64c-449e-ab86-1fcb400ddc60/volumes"
Feb 03 07:38:50 crc kubenswrapper[4708]: I0203 07:38:50.108785 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9da48053-326c-45cb-bf1c-fb8890642e13" path="/var/lib/kubelet/pods/9da48053-326c-45cb-bf1c-fb8890642e13/volumes"
Feb 03 07:38:55 crc kubenswrapper[4708]: I0203 07:38:55.581972 4708 scope.go:117] "RemoveContainer" containerID="80d477dbaa1060a84fd0e7ac381dbbcd5f194e8f203da36a548b95de6f0cce3f"
Feb 03 07:38:55 crc kubenswrapper[4708]: I0203 07:38:55.639074 4708 scope.go:117] "RemoveContainer" containerID="d05044f5e78f95c7b2ed6a411fd0306cbd5f73e1dc075598a9f384118a58bc8f"
Feb 03 07:38:55 crc kubenswrapper[4708]: I0203 07:38:55.702889 4708 scope.go:117] "RemoveContainer" containerID="efd0cc66c29a29935ef515388230d893b8545aede61375169757b998a1a0416c"
Feb 03 07:38:56 crc kubenswrapper[4708]: I0203 07:38:56.505583 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zh6nf/crc-debug-2x9n5" event={"ID":"31a404b5-88e2-4a53-881d-cd50e03b84c0","Type":"ContainerStarted","Data":"917ccb593c76edb655ee4b4173185df02e9461312a099c179434e628b8f609ad"}
Feb 03 07:38:56 crc kubenswrapper[4708]: I0203 07:38:56.520347 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-zh6nf/crc-debug-2x9n5" podStartSLOduration=1.297385943 podStartE2EDuration="14.520329042s" podCreationTimestamp="2026-02-03 07:38:42 +0000 UTC" firstStartedPulling="2026-02-03 07:38:42.438539525 +0000 UTC m=+1701.420486332" lastFinishedPulling="2026-02-03 07:38:55.661482624 +0000 UTC m=+1714.643429431" observedRunningTime="2026-02-03 07:38:56.518428326 +0000 UTC m=+1715.500375133" watchObservedRunningTime="2026-02-03 07:38:56.520329042 +0000 UTC m=+1715.502275849"
Feb 03 07:39:04 crc kubenswrapper[4708]: I0203 07:39:04.094125 4708 scope.go:117] "RemoveContainer" containerID="1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5"
Feb 03 07:39:04 crc kubenswrapper[4708]: E0203 07:39:04.095060 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0"
Feb 03 07:39:17 crc kubenswrapper[4708]: I0203 07:39:17.044292 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-dkt4g"]
Feb 03 07:39:17 crc kubenswrapper[4708]: I0203 07:39:17.054993 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-dkt4g"]
Feb 03 07:39:17 crc kubenswrapper[4708]: I0203 07:39:17.092750 4708 scope.go:117] "RemoveContainer" containerID="1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5"
Feb 03 07:39:17 crc kubenswrapper[4708]: E0203 07:39:17.093078 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0"
Feb 03 07:39:18 crc kubenswrapper[4708]: I0203 07:39:18.108118 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6396b5c7-1019-4539-b518-3fa061f6e53a" path="/var/lib/kubelet/pods/6396b5c7-1019-4539-b518-3fa061f6e53a/volumes"
Feb 03 07:39:19 crc kubenswrapper[4708]: I0203 07:39:19.031293 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-84b5-account-create-update-mbh6v"]
Feb 03 07:39:19 crc kubenswrapper[4708]: I0203 07:39:19.041017 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-cs5gv"]
Feb 03 07:39:19 crc kubenswrapper[4708]: I0203 07:39:19.050376 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-5965-account-create-update-wtxml"]
Feb 03 07:39:19 crc kubenswrapper[4708]: I0203 07:39:19.059457 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-tgz8m"]
Feb 03 07:39:19 crc kubenswrapper[4708]: I0203 07:39:19.066611 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-84b5-account-create-update-mbh6v"]
Feb 03 07:39:19 crc kubenswrapper[4708]: I0203 07:39:19.074115 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-dd77-account-create-update-8bdgp"]
Feb 03 07:39:19 crc kubenswrapper[4708]: I0203 07:39:19.081283 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-tgz8m"]
Feb 03 07:39:19 crc kubenswrapper[4708]: I0203 07:39:19.088422 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-5965-account-create-update-wtxml"]
Feb 03 07:39:19 crc kubenswrapper[4708]: I0203 07:39:19.094678 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-cs5gv"]
Feb 03 07:39:19 crc kubenswrapper[4708]: I0203 07:39:19.101352 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-dd77-account-create-update-8bdgp"]
Feb 03 07:39:20 crc kubenswrapper[4708]: I0203 07:39:20.103372 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f88611b-6078-4735-9ae1-8f2408ea7457" path="/var/lib/kubelet/pods/2f88611b-6078-4735-9ae1-8f2408ea7457/volumes"
Feb 03 07:39:20 crc kubenswrapper[4708]: I0203 07:39:20.104005 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4db39912-1ffe-48e4-b392-f993bbf6ee46" path="/var/lib/kubelet/pods/4db39912-1ffe-48e4-b392-f993bbf6ee46/volumes"
Feb 03 07:39:20 crc kubenswrapper[4708]: I0203 07:39:20.104560 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c9ad1ec-0782-4fb8-a838-d44194d33047" path="/var/lib/kubelet/pods/7c9ad1ec-0782-4fb8-a838-d44194d33047/volumes"
Feb 03 07:39:20 crc kubenswrapper[4708]: I0203 07:39:20.105113 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3" path="/var/lib/kubelet/pods/87b3dc98-3c49-4dc8-a2e1-ca88f869ecf3/volumes"
Feb 03 07:39:20 crc kubenswrapper[4708]: I0203 07:39:20.106139 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1abbf4d-806b-40fa-9e1f-b415c5f8488e" path="/var/lib/kubelet/pods/b1abbf4d-806b-40fa-9e1f-b415c5f8488e/volumes"
Feb 03 07:39:28 crc kubenswrapper[4708]: I0203 07:39:28.093286 4708 scope.go:117] "RemoveContainer" containerID="1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5"
Feb 03 07:39:28 crc kubenswrapper[4708]: E0203 07:39:28.094115 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0"
Feb 03 07:39:42 crc kubenswrapper[4708]: I0203 07:39:42.107911 4708 scope.go:117] "RemoveContainer" containerID="1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5"
Feb 03 07:39:42 crc kubenswrapper[4708]: E0203 07:39:42.109028 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0"
Feb 03 07:39:44 crc kubenswrapper[4708]: I0203 07:39:44.929810 4708 generic.go:334] "Generic (PLEG): container finished" podID="31a404b5-88e2-4a53-881d-cd50e03b84c0" containerID="917ccb593c76edb655ee4b4173185df02e9461312a099c179434e628b8f609ad" exitCode=0
Feb 03 07:39:44 crc kubenswrapper[4708]: I0203 07:39:44.929873 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zh6nf/crc-debug-2x9n5" event={"ID":"31a404b5-88e2-4a53-881d-cd50e03b84c0","Type":"ContainerDied","Data":"917ccb593c76edb655ee4b4173185df02e9461312a099c179434e628b8f609ad"}
Feb 03 07:39:46 crc kubenswrapper[4708]: I0203 07:39:46.052312 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zh6nf/crc-debug-2x9n5"
Feb 03 07:39:46 crc kubenswrapper[4708]: I0203 07:39:46.132986 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-zh6nf/crc-debug-2x9n5"]
Feb 03 07:39:46 crc kubenswrapper[4708]: I0203 07:39:46.133401 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-zh6nf/crc-debug-2x9n5"]
Feb 03 07:39:46 crc kubenswrapper[4708]: I0203 07:39:46.225429 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/31a404b5-88e2-4a53-881d-cd50e03b84c0-host\") pod \"31a404b5-88e2-4a53-881d-cd50e03b84c0\" (UID: \"31a404b5-88e2-4a53-881d-cd50e03b84c0\") "
Feb 03 07:39:46 crc kubenswrapper[4708]: I0203 07:39:46.225728 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6hd2p\" (UniqueName: \"kubernetes.io/projected/31a404b5-88e2-4a53-881d-cd50e03b84c0-kube-api-access-6hd2p\") pod \"31a404b5-88e2-4a53-881d-cd50e03b84c0\" (UID: \"31a404b5-88e2-4a53-881d-cd50e03b84c0\") "
Feb 03 07:39:46 crc kubenswrapper[4708]: I0203 07:39:46.225733 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/31a404b5-88e2-4a53-881d-cd50e03b84c0-host" (OuterVolumeSpecName: "host") pod "31a404b5-88e2-4a53-881d-cd50e03b84c0" (UID: "31a404b5-88e2-4a53-881d-cd50e03b84c0"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 03 07:39:46 crc kubenswrapper[4708]: I0203 07:39:46.227240 4708 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/31a404b5-88e2-4a53-881d-cd50e03b84c0-host\") on node \"crc\" DevicePath \"\""
Feb 03 07:39:46 crc kubenswrapper[4708]: I0203 07:39:46.233260 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31a404b5-88e2-4a53-881d-cd50e03b84c0-kube-api-access-6hd2p" (OuterVolumeSpecName: "kube-api-access-6hd2p") pod "31a404b5-88e2-4a53-881d-cd50e03b84c0" (UID: "31a404b5-88e2-4a53-881d-cd50e03b84c0"). InnerVolumeSpecName "kube-api-access-6hd2p". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 07:39:46 crc kubenswrapper[4708]: I0203 07:39:46.328615 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6hd2p\" (UniqueName: \"kubernetes.io/projected/31a404b5-88e2-4a53-881d-cd50e03b84c0-kube-api-access-6hd2p\") on node \"crc\" DevicePath \"\""
Feb 03 07:39:46 crc kubenswrapper[4708]: I0203 07:39:46.948935 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="08167a9a2da8d11bdbfafcb0f24ec9d420a1b014c7309612c9e78afa45e9f914"
Feb 03 07:39:46 crc kubenswrapper[4708]: I0203 07:39:46.949268 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zh6nf/crc-debug-2x9n5"
Feb 03 07:39:47 crc kubenswrapper[4708]: I0203 07:39:47.263450 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-zh6nf/crc-debug-hw5qg"]
Feb 03 07:39:47 crc kubenswrapper[4708]: E0203 07:39:47.264984 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31a404b5-88e2-4a53-881d-cd50e03b84c0" containerName="container-00"
Feb 03 07:39:47 crc kubenswrapper[4708]: I0203 07:39:47.265067 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="31a404b5-88e2-4a53-881d-cd50e03b84c0" containerName="container-00"
Feb 03 07:39:47 crc kubenswrapper[4708]: I0203 07:39:47.265329 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="31a404b5-88e2-4a53-881d-cd50e03b84c0" containerName="container-00"
Feb 03 07:39:47 crc kubenswrapper[4708]: I0203 07:39:47.266148 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zh6nf/crc-debug-hw5qg"
Feb 03 07:39:47 crc kubenswrapper[4708]: I0203 07:39:47.346059 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5pwr\" (UniqueName: \"kubernetes.io/projected/8dc1887b-b968-4c93-b381-167dc1fb4b4d-kube-api-access-h5pwr\") pod \"crc-debug-hw5qg\" (UID: \"8dc1887b-b968-4c93-b381-167dc1fb4b4d\") " pod="openshift-must-gather-zh6nf/crc-debug-hw5qg"
Feb 03 07:39:47 crc kubenswrapper[4708]: I0203 07:39:47.346407 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8dc1887b-b968-4c93-b381-167dc1fb4b4d-host\") pod \"crc-debug-hw5qg\" (UID: \"8dc1887b-b968-4c93-b381-167dc1fb4b4d\") " pod="openshift-must-gather-zh6nf/crc-debug-hw5qg"
Feb 03 07:39:47 crc kubenswrapper[4708]: I0203 07:39:47.449131 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5pwr\" (UniqueName: \"kubernetes.io/projected/8dc1887b-b968-4c93-b381-167dc1fb4b4d-kube-api-access-h5pwr\") pod \"crc-debug-hw5qg\" (UID: \"8dc1887b-b968-4c93-b381-167dc1fb4b4d\") " pod="openshift-must-gather-zh6nf/crc-debug-hw5qg"
Feb 03 07:39:47 crc kubenswrapper[4708]: I0203 07:39:47.449447 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8dc1887b-b968-4c93-b381-167dc1fb4b4d-host\") pod \"crc-debug-hw5qg\" (UID: \"8dc1887b-b968-4c93-b381-167dc1fb4b4d\") " pod="openshift-must-gather-zh6nf/crc-debug-hw5qg"
Feb 03 07:39:47 crc kubenswrapper[4708]: I0203 07:39:47.449739 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8dc1887b-b968-4c93-b381-167dc1fb4b4d-host\") pod \"crc-debug-hw5qg\" (UID: \"8dc1887b-b968-4c93-b381-167dc1fb4b4d\") " pod="openshift-must-gather-zh6nf/crc-debug-hw5qg"
pod="openshift-must-gather-zh6nf/crc-debug-hw5qg" Feb 03 07:39:47 crc kubenswrapper[4708]: I0203 07:39:47.471498 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5pwr\" (UniqueName: \"kubernetes.io/projected/8dc1887b-b968-4c93-b381-167dc1fb4b4d-kube-api-access-h5pwr\") pod \"crc-debug-hw5qg\" (UID: \"8dc1887b-b968-4c93-b381-167dc1fb4b4d\") " pod="openshift-must-gather-zh6nf/crc-debug-hw5qg" Feb 03 07:39:47 crc kubenswrapper[4708]: I0203 07:39:47.590747 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zh6nf/crc-debug-hw5qg" Feb 03 07:39:47 crc kubenswrapper[4708]: I0203 07:39:47.958337 4708 generic.go:334] "Generic (PLEG): container finished" podID="8dc1887b-b968-4c93-b381-167dc1fb4b4d" containerID="ddaf45b0828c7a5c78e29f018f6fc14b8606828dbbbeb371affb36d71e260dff" exitCode=0 Feb 03 07:39:47 crc kubenswrapper[4708]: I0203 07:39:47.958427 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zh6nf/crc-debug-hw5qg" event={"ID":"8dc1887b-b968-4c93-b381-167dc1fb4b4d","Type":"ContainerDied","Data":"ddaf45b0828c7a5c78e29f018f6fc14b8606828dbbbeb371affb36d71e260dff"} Feb 03 07:39:47 crc kubenswrapper[4708]: I0203 07:39:47.959267 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zh6nf/crc-debug-hw5qg" event={"ID":"8dc1887b-b968-4c93-b381-167dc1fb4b4d","Type":"ContainerStarted","Data":"025ddc69208fc2e4d651b274475ee6b608476772754130add8b8fa28ffe60edd"} Feb 03 07:39:48 crc kubenswrapper[4708]: I0203 07:39:48.104357 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31a404b5-88e2-4a53-881d-cd50e03b84c0" path="/var/lib/kubelet/pods/31a404b5-88e2-4a53-881d-cd50e03b84c0/volumes" Feb 03 07:39:48 crc kubenswrapper[4708]: I0203 07:39:48.319681 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-zh6nf/crc-debug-hw5qg"] Feb 03 07:39:48 crc kubenswrapper[4708]: I0203 07:39:48.333038 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-zh6nf/crc-debug-hw5qg"] Feb 03 07:39:49 crc kubenswrapper[4708]: I0203 07:39:49.090089 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zh6nf/crc-debug-hw5qg" Feb 03 07:39:49 crc kubenswrapper[4708]: I0203 07:39:49.178383 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h5pwr\" (UniqueName: \"kubernetes.io/projected/8dc1887b-b968-4c93-b381-167dc1fb4b4d-kube-api-access-h5pwr\") pod \"8dc1887b-b968-4c93-b381-167dc1fb4b4d\" (UID: \"8dc1887b-b968-4c93-b381-167dc1fb4b4d\") " Feb 03 07:39:49 crc kubenswrapper[4708]: I0203 07:39:49.178486 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8dc1887b-b968-4c93-b381-167dc1fb4b4d-host\") pod \"8dc1887b-b968-4c93-b381-167dc1fb4b4d\" (UID: \"8dc1887b-b968-4c93-b381-167dc1fb4b4d\") " Feb 03 07:39:49 crc kubenswrapper[4708]: I0203 07:39:49.178863 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8dc1887b-b968-4c93-b381-167dc1fb4b4d-host" (OuterVolumeSpecName: "host") pod "8dc1887b-b968-4c93-b381-167dc1fb4b4d" (UID: "8dc1887b-b968-4c93-b381-167dc1fb4b4d"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:39:49 crc kubenswrapper[4708]: I0203 07:39:49.179565 4708 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8dc1887b-b968-4c93-b381-167dc1fb4b4d-host\") on node \"crc\" DevicePath \"\"" Feb 03 07:39:49 crc kubenswrapper[4708]: I0203 07:39:49.184109 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8dc1887b-b968-4c93-b381-167dc1fb4b4d-kube-api-access-h5pwr" (OuterVolumeSpecName: "kube-api-access-h5pwr") pod "8dc1887b-b968-4c93-b381-167dc1fb4b4d" (UID: "8dc1887b-b968-4c93-b381-167dc1fb4b4d"). InnerVolumeSpecName "kube-api-access-h5pwr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:39:49 crc kubenswrapper[4708]: I0203 07:39:49.281954 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h5pwr\" (UniqueName: \"kubernetes.io/projected/8dc1887b-b968-4c93-b381-167dc1fb4b4d-kube-api-access-h5pwr\") on node \"crc\" DevicePath \"\"" Feb 03 07:39:49 crc kubenswrapper[4708]: I0203 07:39:49.541493 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-zh6nf/crc-debug-52kzf"] Feb 03 07:39:49 crc kubenswrapper[4708]: E0203 07:39:49.543106 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dc1887b-b968-4c93-b381-167dc1fb4b4d" containerName="container-00" Feb 03 07:39:49 crc kubenswrapper[4708]: I0203 07:39:49.543209 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dc1887b-b968-4c93-b381-167dc1fb4b4d" containerName="container-00" Feb 03 07:39:49 crc kubenswrapper[4708]: I0203 07:39:49.543470 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="8dc1887b-b968-4c93-b381-167dc1fb4b4d" containerName="container-00" Feb 03 07:39:49 crc kubenswrapper[4708]: I0203 07:39:49.544210 4708 util.go:30] "No sandbox for pod can be found. 
Feb 03 07:39:49 crc kubenswrapper[4708]: I0203 07:39:49.688300 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fn5rz\" (UniqueName: \"kubernetes.io/projected/3b3634f6-5fc6-4481-9fe4-e554ad34fe98-kube-api-access-fn5rz\") pod \"crc-debug-52kzf\" (UID: \"3b3634f6-5fc6-4481-9fe4-e554ad34fe98\") " pod="openshift-must-gather-zh6nf/crc-debug-52kzf"
Feb 03 07:39:49 crc kubenswrapper[4708]: I0203 07:39:49.688419 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3b3634f6-5fc6-4481-9fe4-e554ad34fe98-host\") pod \"crc-debug-52kzf\" (UID: \"3b3634f6-5fc6-4481-9fe4-e554ad34fe98\") " pod="openshift-must-gather-zh6nf/crc-debug-52kzf"
Feb 03 07:39:49 crc kubenswrapper[4708]: I0203 07:39:49.789673 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3b3634f6-5fc6-4481-9fe4-e554ad34fe98-host\") pod \"crc-debug-52kzf\" (UID: \"3b3634f6-5fc6-4481-9fe4-e554ad34fe98\") " pod="openshift-must-gather-zh6nf/crc-debug-52kzf"
Feb 03 07:39:49 crc kubenswrapper[4708]: I0203 07:39:49.789775 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3b3634f6-5fc6-4481-9fe4-e554ad34fe98-host\") pod \"crc-debug-52kzf\" (UID: \"3b3634f6-5fc6-4481-9fe4-e554ad34fe98\") " pod="openshift-must-gather-zh6nf/crc-debug-52kzf"
Feb 03 07:39:49 crc kubenswrapper[4708]: I0203 07:39:49.790167 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fn5rz\" (UniqueName: \"kubernetes.io/projected/3b3634f6-5fc6-4481-9fe4-e554ad34fe98-kube-api-access-fn5rz\") pod \"crc-debug-52kzf\" (UID: \"3b3634f6-5fc6-4481-9fe4-e554ad34fe98\") " pod="openshift-must-gather-zh6nf/crc-debug-52kzf"
Feb 03 07:39:49 crc kubenswrapper[4708]: I0203 07:39:49.812240 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fn5rz\" (UniqueName: \"kubernetes.io/projected/3b3634f6-5fc6-4481-9fe4-e554ad34fe98-kube-api-access-fn5rz\") pod \"crc-debug-52kzf\" (UID: \"3b3634f6-5fc6-4481-9fe4-e554ad34fe98\") " pod="openshift-must-gather-zh6nf/crc-debug-52kzf"
Feb 03 07:39:49 crc kubenswrapper[4708]: I0203 07:39:49.868188 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zh6nf/crc-debug-52kzf"
Feb 03 07:39:49 crc kubenswrapper[4708]: W0203 07:39:49.891709 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3b3634f6_5fc6_4481_9fe4_e554ad34fe98.slice/crio-76fe5befd546433b73eb4a8edbd37a8ca3ee54c3d194712926ab8f434eea22b5 WatchSource:0}: Error finding container 76fe5befd546433b73eb4a8edbd37a8ca3ee54c3d194712926ab8f434eea22b5: Status 404 returned error can't find the container with id 76fe5befd546433b73eb4a8edbd37a8ca3ee54c3d194712926ab8f434eea22b5
Feb 03 07:39:49 crc kubenswrapper[4708]: I0203 07:39:49.978717 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zh6nf/crc-debug-52kzf" event={"ID":"3b3634f6-5fc6-4481-9fe4-e554ad34fe98","Type":"ContainerStarted","Data":"76fe5befd546433b73eb4a8edbd37a8ca3ee54c3d194712926ab8f434eea22b5"}
Feb 03 07:39:49 crc kubenswrapper[4708]: I0203 07:39:49.981150 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="025ddc69208fc2e4d651b274475ee6b608476772754130add8b8fa28ffe60edd"
Feb 03 07:39:49 crc kubenswrapper[4708]: I0203 07:39:49.981211 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zh6nf/crc-debug-hw5qg"
Feb 03 07:39:50 crc kubenswrapper[4708]: I0203 07:39:50.116941 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8dc1887b-b968-4c93-b381-167dc1fb4b4d" path="/var/lib/kubelet/pods/8dc1887b-b968-4c93-b381-167dc1fb4b4d/volumes"
Feb 03 07:39:50 crc kubenswrapper[4708]: I0203 07:39:50.990526 4708 generic.go:334] "Generic (PLEG): container finished" podID="3b3634f6-5fc6-4481-9fe4-e554ad34fe98" containerID="481ee7eb9f4d13dace8e11f70535ece178b199fbfab8547fcd90cbdb33d013bc" exitCode=0
Feb 03 07:39:50 crc kubenswrapper[4708]: I0203 07:39:50.990569 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zh6nf/crc-debug-52kzf" event={"ID":"3b3634f6-5fc6-4481-9fe4-e554ad34fe98","Type":"ContainerDied","Data":"481ee7eb9f4d13dace8e11f70535ece178b199fbfab8547fcd90cbdb33d013bc"}
Feb 03 07:39:51 crc kubenswrapper[4708]: I0203 07:39:51.028423 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-zh6nf/crc-debug-52kzf"]
Feb 03 07:39:51 crc kubenswrapper[4708]: I0203 07:39:51.036741 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-zh6nf/crc-debug-52kzf"]
Feb 03 07:39:52 crc kubenswrapper[4708]: I0203 07:39:52.106913 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zh6nf/crc-debug-52kzf"
Feb 03 07:39:52 crc kubenswrapper[4708]: I0203 07:39:52.247340 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3b3634f6-5fc6-4481-9fe4-e554ad34fe98-host\") pod \"3b3634f6-5fc6-4481-9fe4-e554ad34fe98\" (UID: \"3b3634f6-5fc6-4481-9fe4-e554ad34fe98\") "
Feb 03 07:39:52 crc kubenswrapper[4708]: I0203 07:39:52.247484 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3b3634f6-5fc6-4481-9fe4-e554ad34fe98-host" (OuterVolumeSpecName: "host") pod "3b3634f6-5fc6-4481-9fe4-e554ad34fe98" (UID: "3b3634f6-5fc6-4481-9fe4-e554ad34fe98"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:39:52 crc kubenswrapper[4708]: I0203 07:39:52.247623 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fn5rz\" (UniqueName: \"kubernetes.io/projected/3b3634f6-5fc6-4481-9fe4-e554ad34fe98-kube-api-access-fn5rz\") pod \"3b3634f6-5fc6-4481-9fe4-e554ad34fe98\" (UID: \"3b3634f6-5fc6-4481-9fe4-e554ad34fe98\") " Feb 03 07:39:52 crc kubenswrapper[4708]: I0203 07:39:52.248308 4708 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3b3634f6-5fc6-4481-9fe4-e554ad34fe98-host\") on node \"crc\" DevicePath \"\"" Feb 03 07:39:52 crc kubenswrapper[4708]: I0203 07:39:52.261981 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b3634f6-5fc6-4481-9fe4-e554ad34fe98-kube-api-access-fn5rz" (OuterVolumeSpecName: "kube-api-access-fn5rz") pod "3b3634f6-5fc6-4481-9fe4-e554ad34fe98" (UID: "3b3634f6-5fc6-4481-9fe4-e554ad34fe98"). InnerVolumeSpecName "kube-api-access-fn5rz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:39:52 crc kubenswrapper[4708]: I0203 07:39:52.350896 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fn5rz\" (UniqueName: \"kubernetes.io/projected/3b3634f6-5fc6-4481-9fe4-e554ad34fe98-kube-api-access-fn5rz\") on node \"crc\" DevicePath \"\"" Feb 03 07:39:53 crc kubenswrapper[4708]: I0203 07:39:53.011736 4708 scope.go:117] "RemoveContainer" containerID="481ee7eb9f4d13dace8e11f70535ece178b199fbfab8547fcd90cbdb33d013bc" Feb 03 07:39:53 crc kubenswrapper[4708]: I0203 07:39:53.011775 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zh6nf/crc-debug-52kzf" Feb 03 07:39:54 crc kubenswrapper[4708]: I0203 07:39:54.105376 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b3634f6-5fc6-4481-9fe4-e554ad34fe98" path="/var/lib/kubelet/pods/3b3634f6-5fc6-4481-9fe4-e554ad34fe98/volumes" Feb 03 07:39:55 crc kubenswrapper[4708]: I0203 07:39:55.093774 4708 scope.go:117] "RemoveContainer" containerID="1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5" Feb 03 07:39:55 crc kubenswrapper[4708]: E0203 07:39:55.094682 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:39:56 crc kubenswrapper[4708]: I0203 07:39:56.005936 4708 scope.go:117] "RemoveContainer" containerID="974f0cde8509f7cd3fc6bb2cfd2511f059acd4bf5536f8d8a03fab4746d7d185" Feb 03 07:39:56 crc kubenswrapper[4708]: I0203 07:39:56.030363 4708 scope.go:117] "RemoveContainer" containerID="f6fae03f10f324d13308c06f4b0040caf7f12316006f23477d73fc62be486bb9" Feb 03 07:39:56 crc kubenswrapper[4708]: I0203 07:39:56.092724 4708 scope.go:117] "RemoveContainer" containerID="d5c4b67f03d1fe85a8d35d6b12b663f9ca8e7d87391745050db51aafca363b8c" Feb 03 07:39:56 crc kubenswrapper[4708]: I0203 07:39:56.134865 4708 scope.go:117] "RemoveContainer" containerID="9d03a5d725cefee32def34f26713ce294442c88c66d86f4742260f4225207cbf" Feb 03 07:39:56 crc kubenswrapper[4708]: I0203 07:39:56.191710 4708 scope.go:117] "RemoveContainer" 
containerID="1304aafdabbf3e81599db1cbeae7fc3ce845fd09231bd52d17af31f50ba2b468" Feb 03 07:39:56 crc kubenswrapper[4708]: I0203 07:39:56.230266 4708 scope.go:117] "RemoveContainer" containerID="71846cf9a019d3554b1802eef445ea86d5912773f802d4f6b01863d261e10257" Feb 03 07:39:56 crc kubenswrapper[4708]: I0203 07:39:56.286343 4708 scope.go:117] "RemoveContainer" containerID="666c235470d7c2c9ee98b31cd2eb1e9a24ca7befd94ced667adfe14633fefd25" Feb 03 07:40:08 crc kubenswrapper[4708]: I0203 07:40:08.093177 4708 scope.go:117] "RemoveContainer" containerID="1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5" Feb 03 07:40:08 crc kubenswrapper[4708]: E0203 07:40:08.094056 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:40:08 crc kubenswrapper[4708]: I0203 07:40:08.363434 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6f676fd47d-s9mvl_7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e/barbican-api/0.log" Feb 03 07:40:08 crc kubenswrapper[4708]: I0203 07:40:08.486410 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6f676fd47d-s9mvl_7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e/barbican-api-log/0.log" Feb 03 07:40:08 crc kubenswrapper[4708]: I0203 07:40:08.578863 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-db7889686-b8tst_4e40bc4a-7f69-410e-b310-4cb12a8a7f58/barbican-keystone-listener/0.log" Feb 03 07:40:08 crc kubenswrapper[4708]: I0203 07:40:08.734090 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-db7889686-b8tst_4e40bc4a-7f69-410e-b310-4cb12a8a7f58/barbican-keystone-listener-log/0.log" Feb 03 07:40:08 crc kubenswrapper[4708]: I0203 07:40:08.786375 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6cdbf88dd5-z8pqs_00f8a942-b096-49d1-b020-c1aa13eb42c4/barbican-worker/0.log" Feb 03 07:40:08 crc kubenswrapper[4708]: I0203 07:40:08.817088 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6cdbf88dd5-z8pqs_00f8a942-b096-49d1-b020-c1aa13eb42c4/barbican-worker-log/0.log" Feb 03 07:40:09 crc kubenswrapper[4708]: I0203 07:40:09.011018 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_11c88b16-1207-482e-af23-035b4b973d3b/ceilometer-central-agent/0.log" Feb 03 07:40:09 crc kubenswrapper[4708]: I0203 07:40:09.019446 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_11c88b16-1207-482e-af23-035b4b973d3b/ceilometer-notification-agent/0.log" Feb 03 07:40:09 crc kubenswrapper[4708]: I0203 07:40:09.028742 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_11c88b16-1207-482e-af23-035b4b973d3b/proxy-httpd/0.log" Feb 03 07:40:09 crc kubenswrapper[4708]: I0203 07:40:09.160470 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_11c88b16-1207-482e-af23-035b4b973d3b/sg-core/0.log" Feb 03 07:40:09 crc kubenswrapper[4708]: I0203 07:40:09.209854 4708 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_cinder-api-0_e8626bc3-c20f-47d2-b183-9d27e9ec814c/cinder-api-log/0.log" Feb 03 07:40:09 crc kubenswrapper[4708]: I0203 07:40:09.265627 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_e8626bc3-c20f-47d2-b183-9d27e9ec814c/cinder-api/0.log" Feb 03 07:40:09 crc kubenswrapper[4708]: I0203 07:40:09.469762 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_55a84e3b-6f9a-44d0-b059-2a4c842810dc/cinder-scheduler/0.log" Feb 03 07:40:09 crc kubenswrapper[4708]: I0203 07:40:09.501951 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_55a84e3b-6f9a-44d0-b059-2a4c842810dc/probe/0.log" Feb 03 07:40:09 crc kubenswrapper[4708]: I0203 07:40:09.684848 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-89c5cd4d5-86mmh_b0112ed3-3c81-4e21-ae47-89c473987dec/init/0.log" Feb 03 07:40:09 crc kubenswrapper[4708]: I0203 07:40:09.913208 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-89c5cd4d5-86mmh_b0112ed3-3c81-4e21-ae47-89c473987dec/dnsmasq-dns/0.log" Feb 03 07:40:09 crc kubenswrapper[4708]: I0203 07:40:09.925705 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_a1cfcb85-5e57-43d2-8255-4be0c18d60f0/glance-httpd/0.log" Feb 03 07:40:09 crc kubenswrapper[4708]: I0203 07:40:09.935697 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-89c5cd4d5-86mmh_b0112ed3-3c81-4e21-ae47-89c473987dec/init/0.log" Feb 03 07:40:10 crc kubenswrapper[4708]: I0203 07:40:10.074307 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-bqd2z"] Feb 03 07:40:10 crc kubenswrapper[4708]: I0203 07:40:10.089067 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-bqd2z"] Feb 03 07:40:10 crc kubenswrapper[4708]: I0203 07:40:10.107242 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5def47d5-3c2f-4cfb-acc1-63b2c12e5e98" path="/var/lib/kubelet/pods/5def47d5-3c2f-4cfb-acc1-63b2c12e5e98/volumes" Feb 03 07:40:10 crc kubenswrapper[4708]: I0203 07:40:10.328681 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6/glance-httpd/0.log" Feb 03 07:40:10 crc kubenswrapper[4708]: I0203 07:40:10.371278 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_a1cfcb85-5e57-43d2-8255-4be0c18d60f0/glance-log/0.log" Feb 03 07:40:10 crc kubenswrapper[4708]: I0203 07:40:10.377152 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6/glance-log/0.log" Feb 03 07:40:10 crc kubenswrapper[4708]: I0203 07:40:10.544716 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-5fffdc6c76-m5s5d_e7212cfb-233f-4a09-ae76-fcfe61a4ed14/init/0.log" Feb 03 07:40:10 crc kubenswrapper[4708]: I0203 07:40:10.784746 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-5fffdc6c76-m5s5d_e7212cfb-233f-4a09-ae76-fcfe61a4ed14/ironic-api-log/0.log" Feb 03 07:40:10 crc kubenswrapper[4708]: I0203 07:40:10.795399 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-5fffdc6c76-m5s5d_e7212cfb-233f-4a09-ae76-fcfe61a4ed14/init/0.log" Feb 03 07:40:10 crc kubenswrapper[4708]: I0203 
07:40:10.819070 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-5fffdc6c76-m5s5d_e7212cfb-233f-4a09-ae76-fcfe61a4ed14/ironic-api/0.log" Feb 03 07:40:11 crc kubenswrapper[4708]: I0203 07:40:11.029636 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_361821ae-c957-4e31-bb9b-6d659aaceec4/init/0.log" Feb 03 07:40:11 crc kubenswrapper[4708]: I0203 07:40:11.252135 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_361821ae-c957-4e31-bb9b-6d659aaceec4/init/0.log" Feb 03 07:40:11 crc kubenswrapper[4708]: I0203 07:40:11.319851 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_361821ae-c957-4e31-bb9b-6d659aaceec4/ironic-python-agent-init/0.log" Feb 03 07:40:11 crc kubenswrapper[4708]: I0203 07:40:11.324653 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_361821ae-c957-4e31-bb9b-6d659aaceec4/ironic-python-agent-init/0.log" Feb 03 07:40:11 crc kubenswrapper[4708]: I0203 07:40:11.546839 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_361821ae-c957-4e31-bb9b-6d659aaceec4/init/0.log" Feb 03 07:40:11 crc kubenswrapper[4708]: I0203 07:40:11.608241 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_361821ae-c957-4e31-bb9b-6d659aaceec4/ironic-python-agent-init/0.log" Feb 03 07:40:12 crc kubenswrapper[4708]: I0203 07:40:12.020423 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_361821ae-c957-4e31-bb9b-6d659aaceec4/init/0.log" Feb 03 07:40:12 crc kubenswrapper[4708]: I0203 07:40:12.227738 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_361821ae-c957-4e31-bb9b-6d659aaceec4/ironic-python-agent-init/0.log" Feb 03 07:40:12 crc kubenswrapper[4708]: I0203 07:40:12.383335 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_361821ae-c957-4e31-bb9b-6d659aaceec4/pxe-init/0.log" Feb 03 07:40:12 crc kubenswrapper[4708]: I0203 07:40:12.638257 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_361821ae-c957-4e31-bb9b-6d659aaceec4/httpboot/0.log" Feb 03 07:40:12 crc kubenswrapper[4708]: I0203 07:40:12.735094 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_361821ae-c957-4e31-bb9b-6d659aaceec4/ironic-conductor/0.log" Feb 03 07:40:12 crc kubenswrapper[4708]: I0203 07:40:12.838579 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_361821ae-c957-4e31-bb9b-6d659aaceec4/ramdisk-logs/0.log" Feb 03 07:40:13 crc kubenswrapper[4708]: I0203 07:40:13.056714 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-db-sync-8w6c7_ec5c8613-c88b-4cc5-8ad4-440e65523618/init/0.log" Feb 03 07:40:13 crc kubenswrapper[4708]: I0203 07:40:13.162046 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_361821ae-c957-4e31-bb9b-6d659aaceec4/pxe-init/0.log" Feb 03 07:40:13 crc kubenswrapper[4708]: I0203 07:40:13.221325 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_361821ae-c957-4e31-bb9b-6d659aaceec4/pxe-init/0.log" Feb 03 07:40:13 crc kubenswrapper[4708]: I0203 07:40:13.273628 4708 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ironic-db-sync-8w6c7_ec5c8613-c88b-4cc5-8ad4-440e65523618/init/0.log" Feb 03 07:40:13 crc kubenswrapper[4708]: I0203 07:40:13.308336 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-db-sync-8w6c7_ec5c8613-c88b-4cc5-8ad4-440e65523618/ironic-db-sync/0.log" Feb 03 07:40:13 crc kubenswrapper[4708]: I0203 07:40:13.486272 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_2365ac26-e49a-4ab3-8781-20c1b697b51d/ironic-python-agent-init/0.log" Feb 03 07:40:13 crc kubenswrapper[4708]: I0203 07:40:13.548254 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_361821ae-c957-4e31-bb9b-6d659aaceec4/pxe-init/0.log" Feb 03 07:40:13 crc kubenswrapper[4708]: I0203 07:40:13.659405 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_2365ac26-e49a-4ab3-8781-20c1b697b51d/ironic-python-agent-init/0.log" Feb 03 07:40:13 crc kubenswrapper[4708]: I0203 07:40:13.665001 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_2365ac26-e49a-4ab3-8781-20c1b697b51d/inspector-pxe-init/0.log" Feb 03 07:40:13 crc kubenswrapper[4708]: I0203 07:40:13.668035 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_2365ac26-e49a-4ab3-8781-20c1b697b51d/inspector-pxe-init/0.log" Feb 03 07:40:13 crc kubenswrapper[4708]: I0203 07:40:13.860116 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_2365ac26-e49a-4ab3-8781-20c1b697b51d/ironic-python-agent-init/0.log" Feb 03 07:40:13 crc kubenswrapper[4708]: I0203 07:40:13.886487 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_2365ac26-e49a-4ab3-8781-20c1b697b51d/inspector-httpboot/0.log" Feb 03 07:40:13 crc kubenswrapper[4708]: I0203 07:40:13.940961 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_2365ac26-e49a-4ab3-8781-20c1b697b51d/ironic-inspector/1.log" Feb 03 07:40:13 crc kubenswrapper[4708]: I0203 07:40:13.965593 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_2365ac26-e49a-4ab3-8781-20c1b697b51d/inspector-pxe-init/0.log" Feb 03 07:40:13 crc kubenswrapper[4708]: I0203 07:40:13.980756 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_2365ac26-e49a-4ab3-8781-20c1b697b51d/ironic-inspector/0.log" Feb 03 07:40:14 crc kubenswrapper[4708]: I0203 07:40:14.086224 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_2365ac26-e49a-4ab3-8781-20c1b697b51d/ironic-inspector-httpd/0.log" Feb 03 07:40:14 crc kubenswrapper[4708]: I0203 07:40:14.120206 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_2365ac26-e49a-4ab3-8781-20c1b697b51d/ramdisk-logs/0.log" Feb 03 07:40:14 crc kubenswrapper[4708]: I0203 07:40:14.191081 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-db-sync-v45nm_74d486e0-cafe-4001-a817-dea3959bb928/ironic-inspector-db-sync/0.log" Feb 03 07:40:14 crc kubenswrapper[4708]: I0203 07:40:14.371632 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-neutron-agent-95b7948fb-x2nkv_aeb72dfd-3f7b-41fa-882f-3290c463fcbe/ironic-neutron-agent/1.log" Feb 03 07:40:14 crc kubenswrapper[4708]: I0203 07:40:14.481454 4708 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ironic-neutron-agent-95b7948fb-x2nkv_aeb72dfd-3f7b-41fa-882f-3290c463fcbe/ironic-neutron-agent/2.log" Feb 03 07:40:14 crc kubenswrapper[4708]: I0203 07:40:14.608982 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-7ddbc898b8-cqt5j_11f8a75b-7b47-4838-9751-5a03516154e7/keystone-api/0.log" Feb 03 07:40:14 crc kubenswrapper[4708]: I0203 07:40:14.660595 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_e4d6ad9e-1d8f-4d13-a3ae-6e3a283fc697/kube-state-metrics/0.log" Feb 03 07:40:14 crc kubenswrapper[4708]: I0203 07:40:14.884775 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-59974d4f4f-tvqgn_49513617-6e38-4ae1-ae96-b74bf325d19a/neutron-httpd/0.log" Feb 03 07:40:15 crc kubenswrapper[4708]: I0203 07:40:15.044218 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-59974d4f4f-tvqgn_49513617-6e38-4ae1-ae96-b74bf325d19a/neutron-api/0.log" Feb 03 07:40:15 crc kubenswrapper[4708]: I0203 07:40:15.209344 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_0dead86a-ae50-4e2d-b917-c23cf0a6bf6c/nova-api-log/0.log" Feb 03 07:40:15 crc kubenswrapper[4708]: I0203 07:40:15.287301 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-cell-mapping-6gqmd_741b999e-f896-4960-8f9c-e9b4aade9039/nova-manage/0.log" Feb 03 07:40:15 crc kubenswrapper[4708]: I0203 07:40:15.311966 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_0dead86a-ae50-4e2d-b917-c23cf0a6bf6c/nova-api-api/0.log" Feb 03 07:40:15 crc kubenswrapper[4708]: I0203 07:40:15.594482 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_58ec9777-9aec-4d92-a3a5-6266f6288046/nova-cell0-conductor-conductor/0.log" Feb 03 07:40:15 crc kubenswrapper[4708]: I0203 07:40:15.716059 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-cell-mapping-57zpw_e55db381-d6ba-4d52-981a-918aacdedc97/nova-manage/0.log" Feb 03 07:40:15 crc kubenswrapper[4708]: I0203 07:40:15.943036 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-db-sync-5wzvm_65d33071-644f-4642-a6b3-ee141d7d6360/nova-cell1-conductor-db-sync/0.log" Feb 03 07:40:15 crc kubenswrapper[4708]: I0203 07:40:15.977897 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_209b7389-309a-47b9-bc02-7f7567848b8f/nova-cell1-conductor-conductor/0.log" Feb 03 07:40:16 crc kubenswrapper[4708]: I0203 07:40:16.229706 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_fde60da0-56e9-4d52-b602-8060c10dfb5a/nova-cell1-novncproxy-novncproxy/0.log" Feb 03 07:40:16 crc kubenswrapper[4708]: I0203 07:40:16.348764 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_999ecc72-71e3-4f11-910a-27bd07aa4a05/nova-metadata-log/0.log" Feb 03 07:40:16 crc kubenswrapper[4708]: I0203 07:40:16.611286 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_5f68c9ee-6d86-4dad-b9cb-1a22c7afd031/nova-scheduler-scheduler/0.log" Feb 03 07:40:16 crc kubenswrapper[4708]: I0203 07:40:16.659612 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_999ecc72-71e3-4f11-910a-27bd07aa4a05/nova-metadata-metadata/0.log" Feb 03 07:40:16 crc kubenswrapper[4708]: I0203 07:40:16.748731 4708 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_8484d145-abd4-4112-b81c-338bf4d9285f/mysql-bootstrap/0.log" Feb 03 07:40:16 crc kubenswrapper[4708]: I0203 07:40:16.907686 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_8484d145-abd4-4112-b81c-338bf4d9285f/mysql-bootstrap/0.log" Feb 03 07:40:16 crc kubenswrapper[4708]: I0203 07:40:16.913486 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_8484d145-abd4-4112-b81c-338bf4d9285f/galera/0.log" Feb 03 07:40:17 crc kubenswrapper[4708]: I0203 07:40:17.045953 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_5f6fa285-4374-4be5-b4cf-e3dd8ef56762/mysql-bootstrap/0.log" Feb 03 07:40:17 crc kubenswrapper[4708]: I0203 07:40:17.251052 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_5f6fa285-4374-4be5-b4cf-e3dd8ef56762/mysql-bootstrap/0.log" Feb 03 07:40:17 crc kubenswrapper[4708]: I0203 07:40:17.275950 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_db624ad8-1c0f-4100-b3a2-4c80e02c1b03/openstackclient/0.log" Feb 03 07:40:17 crc kubenswrapper[4708]: I0203 07:40:17.277020 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_5f6fa285-4374-4be5-b4cf-e3dd8ef56762/galera/0.log" Feb 03 07:40:17 crc kubenswrapper[4708]: I0203 07:40:17.506644 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-44cl7_2acc0e69-490c-4b5c-8486-bf0fd3fb6316/openstack-network-exporter/0.log" Feb 03 07:40:17 crc kubenswrapper[4708]: I0203 07:40:17.631433 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-48bcs_6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024/ovsdb-server-init/0.log" Feb 03 07:40:17 crc kubenswrapper[4708]: I0203 07:40:17.830365 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-48bcs_6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024/ovs-vswitchd/0.log" Feb 03 07:40:18 crc kubenswrapper[4708]: I0203 07:40:18.070540 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-48bcs_6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024/ovsdb-server-init/0.log" Feb 03 07:40:18 crc kubenswrapper[4708]: I0203 07:40:18.074314 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-48bcs_6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024/ovsdb-server/0.log" Feb 03 07:40:18 crc kubenswrapper[4708]: I0203 07:40:18.264707 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_d1a2b7e5-23d7-48f6-b144-d575da1e613d/openstack-network-exporter/0.log" Feb 03 07:40:18 crc kubenswrapper[4708]: I0203 07:40:18.295633 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-pb4xp_3b5a2d58-5ebb-4838-a798-bc280fe99951/ovn-controller/0.log" Feb 03 07:40:18 crc kubenswrapper[4708]: I0203 07:40:18.357913 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_d1a2b7e5-23d7-48f6-b144-d575da1e613d/ovn-northd/0.log" Feb 03 07:40:18 crc kubenswrapper[4708]: I0203 07:40:18.511747 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_6c70c9bb-deb5-45aa-96e6-aea4e711f93a/ovsdbserver-nb/0.log" Feb 03 07:40:18 crc kubenswrapper[4708]: I0203 07:40:18.581194 4708 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovsdbserver-nb-0_6c70c9bb-deb5-45aa-96e6-aea4e711f93a/openstack-network-exporter/0.log" Feb 03 07:40:18 crc kubenswrapper[4708]: I0203 07:40:18.729816 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_d09d6786-b507-4848-977f-a5e94b77d0ad/ovsdbserver-sb/0.log" Feb 03 07:40:18 crc kubenswrapper[4708]: I0203 07:40:18.802104 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_d09d6786-b507-4848-977f-a5e94b77d0ad/openstack-network-exporter/0.log" Feb 03 07:40:18 crc kubenswrapper[4708]: I0203 07:40:18.887128 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-67599f68dd-cgvwn_11e8be85-5666-4e3d-8964-b0d554d5b1ef/placement-api/0.log" Feb 03 07:40:18 crc kubenswrapper[4708]: I0203 07:40:18.962638 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-67599f68dd-cgvwn_11e8be85-5666-4e3d-8964-b0d554d5b1ef/placement-log/0.log" Feb 03 07:40:19 crc kubenswrapper[4708]: I0203 07:40:19.107805 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_a1eb365e-2bf1-450f-90ae-5ca8f2de2de6/setup-container/0.log" Feb 03 07:40:19 crc kubenswrapper[4708]: I0203 07:40:19.308228 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_a1eb365e-2bf1-450f-90ae-5ca8f2de2de6/setup-container/0.log" Feb 03 07:40:19 crc kubenswrapper[4708]: I0203 07:40:19.369153 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_a1eb365e-2bf1-450f-90ae-5ca8f2de2de6/rabbitmq/0.log" Feb 03 07:40:19 crc kubenswrapper[4708]: I0203 07:40:19.442468 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_baf187c5-9fe4-4496-8f70-ac916d0bb075/setup-container/0.log" Feb 03 07:40:19 crc kubenswrapper[4708]: I0203 07:40:19.639472 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_baf187c5-9fe4-4496-8f70-ac916d0bb075/setup-container/0.log" Feb 03 07:40:19 crc kubenswrapper[4708]: I0203 07:40:19.657170 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_baf187c5-9fe4-4496-8f70-ac916d0bb075/rabbitmq/0.log" Feb 03 07:40:19 crc kubenswrapper[4708]: I0203 07:40:19.698546 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-85b58fb76c-jldbq_7e6d6f02-2176-4c8f-93c4-cb78832fc2d3/proxy-httpd/0.log" Feb 03 07:40:19 crc kubenswrapper[4708]: I0203 07:40:19.882190 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-85b58fb76c-jldbq_7e6d6f02-2176-4c8f-93c4-cb78832fc2d3/proxy-server/0.log" Feb 03 07:40:19 crc kubenswrapper[4708]: I0203 07:40:19.931852 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-zljlj_fdec39a4-6222-4122-901f-4a6603afc348/swift-ring-rebalance/0.log" Feb 03 07:40:20 crc kubenswrapper[4708]: I0203 07:40:20.091006 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a0593ff7-ba15-46be-8879-70dc42f3beb2/account-auditor/0.log" Feb 03 07:40:20 crc kubenswrapper[4708]: I0203 07:40:20.103997 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a0593ff7-ba15-46be-8879-70dc42f3beb2/account-reaper/0.log" Feb 03 07:40:20 crc kubenswrapper[4708]: I0203 07:40:20.175258 4708 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_a0593ff7-ba15-46be-8879-70dc42f3beb2/account-replicator/0.log" Feb 03 07:40:20 crc kubenswrapper[4708]: I0203 07:40:20.247742 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a0593ff7-ba15-46be-8879-70dc42f3beb2/account-server/0.log" Feb 03 07:40:20 crc kubenswrapper[4708]: I0203 07:40:20.310744 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a0593ff7-ba15-46be-8879-70dc42f3beb2/container-auditor/0.log" Feb 03 07:40:20 crc kubenswrapper[4708]: I0203 07:40:20.313674 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a0593ff7-ba15-46be-8879-70dc42f3beb2/container-replicator/0.log" Feb 03 07:40:20 crc kubenswrapper[4708]: I0203 07:40:20.420788 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a0593ff7-ba15-46be-8879-70dc42f3beb2/container-server/0.log" Feb 03 07:40:20 crc kubenswrapper[4708]: I0203 07:40:20.509960 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a0593ff7-ba15-46be-8879-70dc42f3beb2/container-updater/0.log" Feb 03 07:40:20 crc kubenswrapper[4708]: I0203 07:40:20.530304 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a0593ff7-ba15-46be-8879-70dc42f3beb2/object-expirer/0.log" Feb 03 07:40:20 crc kubenswrapper[4708]: I0203 07:40:20.572996 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a0593ff7-ba15-46be-8879-70dc42f3beb2/object-auditor/0.log" Feb 03 07:40:20 crc kubenswrapper[4708]: I0203 07:40:20.620104 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a0593ff7-ba15-46be-8879-70dc42f3beb2/object-replicator/0.log" Feb 03 07:40:20 crc kubenswrapper[4708]: I0203 07:40:20.725587 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a0593ff7-ba15-46be-8879-70dc42f3beb2/object-updater/0.log" Feb 03 07:40:20 crc kubenswrapper[4708]: I0203 07:40:20.750658 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a0593ff7-ba15-46be-8879-70dc42f3beb2/object-server/0.log" Feb 03 07:40:20 crc kubenswrapper[4708]: I0203 07:40:20.763140 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a0593ff7-ba15-46be-8879-70dc42f3beb2/rsync/0.log" Feb 03 07:40:20 crc kubenswrapper[4708]: I0203 07:40:20.847402 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a0593ff7-ba15-46be-8879-70dc42f3beb2/swift-recon-cron/0.log" Feb 03 07:40:21 crc kubenswrapper[4708]: I0203 07:40:21.092719 4708 scope.go:117] "RemoveContainer" containerID="1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5" Feb 03 07:40:21 crc kubenswrapper[4708]: E0203 07:40:21.093700 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:40:23 crc kubenswrapper[4708]: I0203 07:40:23.464975 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_98eee8d5-f15e-4add-86d3-d19f15018230/memcached/0.log" Feb 03 07:40:28 
Feb 03 07:40:28 crc kubenswrapper[4708]: I0203 07:40:28.048253 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-6gqmd"]
Feb 03 07:40:28 crc kubenswrapper[4708]: I0203 07:40:28.103265 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="741b999e-f896-4960-8f9c-e9b4aade9039" path="/var/lib/kubelet/pods/741b999e-f896-4960-8f9c-e9b4aade9039/volumes"
Feb 03 07:40:30 crc kubenswrapper[4708]: I0203 07:40:30.035431 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-5wzvm"]
Feb 03 07:40:30 crc kubenswrapper[4708]: I0203 07:40:30.043247 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-5wzvm"]
Feb 03 07:40:30 crc kubenswrapper[4708]: I0203 07:40:30.102746 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65d33071-644f-4642-a6b3-ee141d7d6360" path="/var/lib/kubelet/pods/65d33071-644f-4642-a6b3-ee141d7d6360/volumes"
Feb 03 07:40:34 crc kubenswrapper[4708]: I0203 07:40:34.093163 4708 scope.go:117] "RemoveContainer" containerID="1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5"
Feb 03 07:40:34 crc kubenswrapper[4708]: E0203 07:40:34.094213 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0"
Feb 03 07:40:44 crc kubenswrapper[4708]: I0203 07:40:44.266170 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg_93b116c1-740f-430c-bb44-20ffc67925f0/util/0.log"
Feb 03 07:40:44 crc kubenswrapper[4708]: I0203 07:40:44.441439 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg_93b116c1-740f-430c-bb44-20ffc67925f0/pull/0.log"
Feb 03 07:40:44 crc kubenswrapper[4708]: I0203 07:40:44.478503 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg_93b116c1-740f-430c-bb44-20ffc67925f0/pull/0.log"
Feb 03 07:40:44 crc kubenswrapper[4708]: I0203 07:40:44.581746 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg_93b116c1-740f-430c-bb44-20ffc67925f0/util/0.log"
Feb 03 07:40:44 crc kubenswrapper[4708]: I0203 07:40:44.606198 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg_93b116c1-740f-430c-bb44-20ffc67925f0/util/0.log"
Feb 03 07:40:44 crc kubenswrapper[4708]: I0203 07:40:44.655424 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg_93b116c1-740f-430c-bb44-20ffc67925f0/pull/0.log"
Feb 03 07:40:44 crc kubenswrapper[4708]: I0203 07:40:44.716778 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg_93b116c1-740f-430c-bb44-20ffc67925f0/extract/0.log"
Feb 03 07:40:44 crc kubenswrapper[4708]: I0203 07:40:44.862058 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-fc589b45f-mqk95_fe83b4e1-7562-495b-99bc-aa5d1202881c/manager/0.log"
Feb 03 07:40:45 crc kubenswrapper[4708]: I0203 07:40:45.091584 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-8f4c5cb64-d2ddp_bdd317ff-3849-4a28-9640-dd4611b86599/manager/0.log"
Feb 03 07:40:45 crc kubenswrapper[4708]: I0203 07:40:45.450339 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5d77f4dbc9-7vww8_0b3f5149-6624-450b-b3bd-be0d0ca78c73/manager/0.log"
Feb 03 07:40:45 crc kubenswrapper[4708]: I0203 07:40:45.453406 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-65dc6c8d9c-bgm44_794426b0-cf19-43ff-957e-3413c77f0570/manager/0.log"
Feb 03 07:40:45 crc kubenswrapper[4708]: I0203 07:40:45.737726 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-866f9bb544-m4775_f1edcba9-46e3-49fd-bb48-ba29b86c7bac/manager/0.log"
Feb 03 07:40:45 crc kubenswrapper[4708]: I0203 07:40:45.762569 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-5fb775575f-gqvwr_35c2f81b-a6df-4f5c-98c9-e9efb7f362b4/manager/0.log"
Feb 03 07:40:46 crc kubenswrapper[4708]: I0203 07:40:46.092469 4708 scope.go:117] "RemoveContainer" containerID="1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5"
Feb 03 07:40:46 crc kubenswrapper[4708]: E0203 07:40:46.092754 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0"
Feb 03 07:40:46 crc kubenswrapper[4708]: I0203 07:40:46.101744 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-5d86df5cd7-2pljs_c6a27492-3276-45de-a2d9-1c605152a0b6/manager/0.log"
Feb 03 07:40:46 crc kubenswrapper[4708]: I0203 07:40:46.293301 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-64469b487f-kccqb_3e86abc8-c97c-4eef-b181-0d87376edd8f/manager/0.log"
Feb 03 07:40:46 crc kubenswrapper[4708]: I0203 07:40:46.306491 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7775d87d9d-gpdwp_d6260b8b-c5f5-4803-8305-0b14903926c9/manager/0.log"
Feb 03 07:40:46 crc kubenswrapper[4708]: I0203 07:40:46.324320 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-79955696d6-btqlz_9f166dd2-52e4-473c-9168-c065582fa0e4/manager/0.log"
Feb 03 07:40:46 crc kubenswrapper[4708]: I0203 07:40:46.546602 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-67bf948998-gl2lx_3b0ccfa3-4ef3-4e3c-9127-59e1abc6631d/manager/0.log"
Feb 03 07:40:46 crc kubenswrapper[4708]: I0203 07:40:46.551738 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-576995988b-tdxnl_7d3cec4a-da6f-431a-98d7-c4784bb248bc/manager/0.log"
Feb 03 07:40:46 crc kubenswrapper[4708]: I0203 07:40:46.740513 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-5644b66645-m7mbt_802333ba-2384-4688-b939-28cbfda8bfc1/manager/0.log"
Feb 03 07:40:47 crc kubenswrapper[4708]: I0203 07:40:47.003483 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62_f6f38306-d4b2-46fa-9c49-8ac276362db8/manager/0.log"
Feb 03 07:40:47 crc kubenswrapper[4708]: I0203 07:40:47.550128 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-init-d8cb6cd6c-cknw2_bc5c0cc4-1640-4e46-86b7-ed4ce809d4aa/operator/0.log"
Feb 03 07:40:47 crc kubenswrapper[4708]: I0203 07:40:47.776428 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-pdxl8_a6e65791-8332-41fb-aac6-d17c3ac9d6f6/registry-server/0.log"
Feb 03 07:40:47 crc kubenswrapper[4708]: I0203 07:40:47.836807 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7b89ddb58-x6prm_240226bb-f320-4bd5-87ad-1d219c9e61e7/manager/0.log"
Feb 03 07:40:48 crc kubenswrapper[4708]: I0203 07:40:48.237266 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5b964cf4cd-k5zbb_7d53946e-45e4-4abe-b4e7-d64339fdedd3/manager/0.log"
Feb 03 07:40:48 crc kubenswrapper[4708]: I0203 07:40:48.252459 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-79f7df8fc4-bhfdd_b5a7b398-66a9-4c39-a940-631bcc804dfe/manager/0.log"
Feb 03 07:40:48 crc kubenswrapper[4708]: I0203 07:40:48.253606 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-788c46999f-mlxcw_6cee24b4-302f-48db-badb-39bcab5756d9/manager/0.log"
Feb 03 07:40:48 crc kubenswrapper[4708]: I0203 07:40:48.491080 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-mtgqj_3841da74-e9f4-4f19-ae3c-66e117029c51/operator/0.log"
Feb 03 07:40:48 crc kubenswrapper[4708]: I0203 07:40:48.633814 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-7b89fdf75b-94vx4_ac5a5419-6887-45ea-944d-1c8f51816492/manager/0.log"
Feb 03 07:40:48 crc kubenswrapper[4708]: I0203 07:40:48.684328 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-56f8bfcd9f-rxvkg_1d0931b6-6d69-4702-9b8c-93f1a6600bbe/manager/0.log"
Feb 03 07:40:48 crc kubenswrapper[4708]: I0203 07:40:48.932177 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-565849b54-rwgmc_faade3fc-fd45-4bcf-8aa5-0b0a3765581f/manager/0.log"
Feb 03 07:40:49 crc kubenswrapper[4708]: I0203 07:40:49.005110 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-586b95b788-gldzv_5c9c90e2-345b-4a13-9acc-6e4d98113779/manager/0.log"
Feb 03 07:40:56 crc kubenswrapper[4708]: I0203 07:40:56.534636 4708 scope.go:117] "RemoveContainer" containerID="a610cfdfca03aa0f3bcd8b6afc1f403d4b5f731f7deeac468e62aec0f615bfdf"
Feb 03 07:40:56 crc kubenswrapper[4708]: I0203 07:40:56.581931 4708 scope.go:117] "RemoveContainer" containerID="63f582bf7f5d415f591a3d0afd8cae298bcaca5ffc6726f98dedc2945f289bad"
Feb 03 07:40:56 crc kubenswrapper[4708]: I0203 07:40:56.623405 4708 scope.go:117] "RemoveContainer" containerID="90f4dedde762dba325f889559509a4ef78df8e437581ea0d6e7ce7f143aab114"
Feb 03 07:40:59 crc kubenswrapper[4708]: I0203 07:40:59.092773 4708 scope.go:117] "RemoveContainer" containerID="1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5"
Feb 03 07:40:59 crc kubenswrapper[4708]: E0203 07:40:59.094202 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0"
Feb 03 07:41:07 crc kubenswrapper[4708]: I0203 07:41:07.560220 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-p5qw9_915701ce-919a-4743-b390-fa72105516e1/control-plane-machine-set-operator/0.log"
Feb 03 07:41:07 crc kubenswrapper[4708]: I0203 07:41:07.781913 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-9fjv2_a34c5792-5895-4d08-9e7e-b3948f5be096/kube-rbac-proxy/0.log"
Feb 03 07:41:07 crc kubenswrapper[4708]: I0203 07:41:07.827034 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-9fjv2_a34c5792-5895-4d08-9e7e-b3948f5be096/machine-api-operator/0.log"
Feb 03 07:41:10 crc kubenswrapper[4708]: I0203 07:41:10.093228 4708 scope.go:117] "RemoveContainer" containerID="1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5"
Feb 03 07:41:10 crc kubenswrapper[4708]: E0203 07:41:10.093785 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0"
Feb 03 07:41:13 crc kubenswrapper[4708]: I0203 07:41:13.042517 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-57zpw"]
Feb 03 07:41:13 crc kubenswrapper[4708]: I0203 07:41:13.053025 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-57zpw"]
Feb 03 07:41:14 crc kubenswrapper[4708]: I0203 07:41:14.105354 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e55db381-d6ba-4d52-981a-918aacdedc97" path="/var/lib/kubelet/pods/e55db381-d6ba-4d52-981a-918aacdedc97/volumes"
Feb 03 07:41:21 crc kubenswrapper[4708]: I0203 07:41:21.071132 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-858654f9db-jpxrc_59a903ed-9dad-488c-b531-cbe96052d31b/cert-manager-controller/0.log"
Feb 03 07:41:21 crc kubenswrapper[4708]: I0203 07:41:21.221992 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-cf98fcc89-xfwvc_4df04764-d566-42a4-b7f4-af82a04b3fc3/cert-manager-cainjector/0.log"
Feb 03 07:41:21 crc kubenswrapper[4708]: I0203 07:41:21.263291 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-687f57d79b-j4njw_4b1cdb17-b07e-4d3a-86fd-418361057f9d/cert-manager-webhook/0.log"
Feb 03 07:41:24 crc kubenswrapper[4708]: I0203 07:41:24.093659 4708 scope.go:117] "RemoveContainer" containerID="1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5"
Feb 03 07:41:24 crc kubenswrapper[4708]: E0203 07:41:24.094289 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0"
Feb 03 07:41:34 crc kubenswrapper[4708]: I0203 07:41:34.723876 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7754f76f8b-nwm6n_5c6dbe91-1ee1-4629-bbee-e661af990956/nmstate-console-plugin/0.log"
Feb 03 07:41:34 crc kubenswrapper[4708]: I0203 07:41:34.957179 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-ls24k_09450ee3-4732-4c81-8bf1-cca9c8d8fdc6/nmstate-handler/0.log"
Feb 03 07:41:35 crc kubenswrapper[4708]: I0203 07:41:35.072399 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-vrkbt_f3e72a2c-73aa-410e-8386-1a2e6b510d4f/kube-rbac-proxy/0.log"
Feb 03 07:41:35 crc kubenswrapper[4708]: I0203 07:41:35.107792 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-vrkbt_f3e72a2c-73aa-410e-8386-1a2e6b510d4f/nmstate-metrics/0.log"
Feb 03 07:41:35 crc kubenswrapper[4708]: I0203 07:41:35.201445 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-646758c888-jx7x5_3396f4c1-fb82-428b-bde6-0f30b8bf6c59/nmstate-operator/0.log"
Feb 03 07:41:35 crc kubenswrapper[4708]: I0203 07:41:35.347684 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-8474b5b9d8-b725x_4bcad1f4-a07d-4a93-8b5b-b6df72d2e34e/nmstate-webhook/0.log"
Feb 03 07:41:39 crc kubenswrapper[4708]: I0203 07:41:39.092619 4708 scope.go:117] "RemoveContainer" containerID="1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5"
Feb 03 07:41:39 crc kubenswrapper[4708]: E0203 07:41:39.093335 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0"
Feb 03 07:41:50 crc kubenswrapper[4708]: I0203 07:41:50.093334 4708 scope.go:117] "RemoveContainer" containerID="1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5"
Feb 03 07:41:50 crc kubenswrapper[4708]: E0203 07:41:50.094369 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0"
Feb 03 07:41:56 crc kubenswrapper[4708]: I0203 07:41:56.726539 4708 scope.go:117] "RemoveContainer" containerID="b631ce6336dd3b267f6a03ac6dbed496ef56579e7337329f48407162177f45e4"
Feb 03 07:42:01 crc kubenswrapper[4708]: I0203 07:42:01.094790 4708 scope.go:117] "RemoveContainer" containerID="1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5"
Feb 03 07:42:01 crc kubenswrapper[4708]: E0203 07:42:01.096051 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0"
Feb 03 07:42:03 crc kubenswrapper[4708]: I0203 07:42:03.099101 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-cbjxc_fae1d180-0dfb-4f07-922c-5b158d2ebcd3/kube-rbac-proxy/0.log"
Feb 03 07:42:03 crc kubenswrapper[4708]: I0203 07:42:03.154947 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-cbjxc_fae1d180-0dfb-4f07-922c-5b158d2ebcd3/controller/0.log"
Feb 03 07:42:03 crc kubenswrapper[4708]: I0203 07:42:03.326294 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/cp-frr-files/0.log"
Feb 03 07:42:03 crc kubenswrapper[4708]: I0203 07:42:03.450641 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/cp-frr-files/0.log"
Feb 03 07:42:03 crc kubenswrapper[4708]: I0203 07:42:03.482206 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/cp-metrics/0.log"
Feb 03 07:42:03 crc kubenswrapper[4708]: I0203 07:42:03.508935 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/cp-reloader/0.log"
Feb 03 07:42:03 crc kubenswrapper[4708]: I0203 07:42:03.516832 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/cp-reloader/0.log"
Feb 03 07:42:03 crc kubenswrapper[4708]: I0203 07:42:03.691941 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/cp-reloader/0.log"
Feb 03 07:42:03 crc kubenswrapper[4708]: I0203 07:42:03.695578 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/cp-frr-files/0.log"
Feb 03 07:42:03 crc kubenswrapper[4708]: I0203 07:42:03.727996 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/cp-metrics/0.log"
Feb 03 07:42:03 crc kubenswrapper[4708]: I0203 07:42:03.743098 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/cp-metrics/0.log"
Feb 03 07:42:03 crc kubenswrapper[4708]: I0203 07:42:03.886266 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/cp-reloader/0.log"
Feb 03 07:42:03 crc kubenswrapper[4708]: I0203 07:42:03.895436 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/controller/0.log"
Feb 03 07:42:03 crc kubenswrapper[4708]: I0203 07:42:03.914026 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/cp-frr-files/0.log"
Feb 03 07:42:03 crc kubenswrapper[4708]: I0203 07:42:03.919886 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/cp-metrics/0.log"
Feb 03 07:42:04 crc kubenswrapper[4708]: I0203 07:42:04.073379 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/frr-metrics/0.log"
Feb 03 07:42:04 crc kubenswrapper[4708]: I0203 07:42:04.086563 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/kube-rbac-proxy-frr/0.log"
Feb 03 07:42:04 crc kubenswrapper[4708]: I0203 07:42:04.116231 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/kube-rbac-proxy/0.log"
Feb 03 07:42:04 crc kubenswrapper[4708]: I0203 07:42:04.335058 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/reloader/0.log"
Feb 03 07:42:04 crc kubenswrapper[4708]: I0203 07:42:04.365843 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-xs54b_20c5389c-b542-4620-ac99-0ecfb0ae7720/frr-k8s-webhook-server/0.log"
Feb 03 07:42:04 crc kubenswrapper[4708]: I0203 07:42:04.557205 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-7f7b788fc7-dmpn2_c67268c0-d17c-4659-829e-2865b70963f0/manager/0.log"
Feb 03 07:42:04 crc kubenswrapper[4708]: I0203 07:42:04.747622 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-7d65755cd8-mqk5v_85431c28-b637-4bab-b63b-982307ac860c/webhook-server/0.log"
Feb 03 07:42:04 crc kubenswrapper[4708]: I0203 07:42:04.858371 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-hcz88_31740186-1408-414d-86ee-66b5f2219175/kube-rbac-proxy/0.log"
Feb 03 07:42:04 crc kubenswrapper[4708]: I0203 07:42:04.978177 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/frr/0.log"
Feb 03 07:42:05 crc kubenswrapper[4708]: I0203 07:42:05.282198 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-hcz88_31740186-1408-414d-86ee-66b5f2219175/speaker/0.log"
Feb 03 07:42:16 crc kubenswrapper[4708]: I0203 07:42:16.093689 4708 scope.go:117] "RemoveContainer" containerID="1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5"
containerID="1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5" Feb 03 07:42:16 crc kubenswrapper[4708]: E0203 07:42:16.094861 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:42:17 crc kubenswrapper[4708]: I0203 07:42:17.462372 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf_402c1015-f80b-44cf-aab2-afd529531cfd/util/0.log" Feb 03 07:42:17 crc kubenswrapper[4708]: I0203 07:42:17.725474 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf_402c1015-f80b-44cf-aab2-afd529531cfd/pull/0.log" Feb 03 07:42:17 crc kubenswrapper[4708]: I0203 07:42:17.729638 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf_402c1015-f80b-44cf-aab2-afd529531cfd/pull/0.log" Feb 03 07:42:17 crc kubenswrapper[4708]: I0203 07:42:17.760281 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf_402c1015-f80b-44cf-aab2-afd529531cfd/util/0.log" Feb 03 07:42:17 crc kubenswrapper[4708]: I0203 07:42:17.902101 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf_402c1015-f80b-44cf-aab2-afd529531cfd/extract/0.log" Feb 03 07:42:17 crc kubenswrapper[4708]: I0203 07:42:17.924427 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf_402c1015-f80b-44cf-aab2-afd529531cfd/pull/0.log" Feb 03 07:42:17 crc kubenswrapper[4708]: I0203 07:42:17.932389 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf_402c1015-f80b-44cf-aab2-afd529531cfd/util/0.log" Feb 03 07:42:18 crc kubenswrapper[4708]: I0203 07:42:18.069423 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n_20d02b2d-b83b-4dcf-ac9d-bffece1d430c/util/0.log" Feb 03 07:42:18 crc kubenswrapper[4708]: I0203 07:42:18.259558 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n_20d02b2d-b83b-4dcf-ac9d-bffece1d430c/util/0.log" Feb 03 07:42:18 crc kubenswrapper[4708]: I0203 07:42:18.285520 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n_20d02b2d-b83b-4dcf-ac9d-bffece1d430c/pull/0.log" Feb 03 07:42:18 crc kubenswrapper[4708]: I0203 07:42:18.323401 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n_20d02b2d-b83b-4dcf-ac9d-bffece1d430c/pull/0.log" Feb 03 07:42:18 crc kubenswrapper[4708]: I0203 07:42:18.440851 4708 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n_20d02b2d-b83b-4dcf-ac9d-bffece1d430c/util/0.log" Feb 03 07:42:18 crc kubenswrapper[4708]: I0203 07:42:18.451750 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n_20d02b2d-b83b-4dcf-ac9d-bffece1d430c/extract/0.log" Feb 03 07:42:18 crc kubenswrapper[4708]: I0203 07:42:18.458647 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n_20d02b2d-b83b-4dcf-ac9d-bffece1d430c/pull/0.log" Feb 03 07:42:18 crc kubenswrapper[4708]: I0203 07:42:18.620155 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9glc4_849de51a-5755-4905-8627-1cc76e9e4647/extract-utilities/0.log" Feb 03 07:42:18 crc kubenswrapper[4708]: I0203 07:42:18.769045 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9glc4_849de51a-5755-4905-8627-1cc76e9e4647/extract-content/0.log" Feb 03 07:42:18 crc kubenswrapper[4708]: I0203 07:42:18.791300 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9glc4_849de51a-5755-4905-8627-1cc76e9e4647/extract-utilities/0.log" Feb 03 07:42:18 crc kubenswrapper[4708]: I0203 07:42:18.820951 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9glc4_849de51a-5755-4905-8627-1cc76e9e4647/extract-content/0.log" Feb 03 07:42:18 crc kubenswrapper[4708]: I0203 07:42:18.973052 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9glc4_849de51a-5755-4905-8627-1cc76e9e4647/extract-content/0.log" Feb 03 07:42:18 crc kubenswrapper[4708]: I0203 07:42:18.989806 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9glc4_849de51a-5755-4905-8627-1cc76e9e4647/extract-utilities/0.log" Feb 03 07:42:19 crc kubenswrapper[4708]: I0203 07:42:19.178366 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9glc4_849de51a-5755-4905-8627-1cc76e9e4647/registry-server/0.log" Feb 03 07:42:19 crc kubenswrapper[4708]: I0203 07:42:19.194517 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-zkk2w_f881ea5f-3c53-4524-8999-6ecbfaf5dfef/extract-utilities/0.log" Feb 03 07:42:19 crc kubenswrapper[4708]: I0203 07:42:19.385402 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-zkk2w_f881ea5f-3c53-4524-8999-6ecbfaf5dfef/extract-utilities/0.log" Feb 03 07:42:19 crc kubenswrapper[4708]: I0203 07:42:19.393885 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-zkk2w_f881ea5f-3c53-4524-8999-6ecbfaf5dfef/extract-content/0.log" Feb 03 07:42:19 crc kubenswrapper[4708]: I0203 07:42:19.402127 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-zkk2w_f881ea5f-3c53-4524-8999-6ecbfaf5dfef/extract-content/0.log" Feb 03 07:42:19 crc kubenswrapper[4708]: I0203 07:42:19.572086 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-zkk2w_f881ea5f-3c53-4524-8999-6ecbfaf5dfef/extract-content/0.log" Feb 03 07:42:19 crc 
kubenswrapper[4708]: I0203 07:42:19.597921 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-zkk2w_f881ea5f-3c53-4524-8999-6ecbfaf5dfef/extract-utilities/0.log" Feb 03 07:42:19 crc kubenswrapper[4708]: I0203 07:42:19.834806 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-hs7h6_760c2ebf-e516-4db6-a500-d2b897cc96de/marketplace-operator/0.log" Feb 03 07:42:19 crc kubenswrapper[4708]: I0203 07:42:19.859054 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zh966_9a265481-0560-4311-8886-1e3a833e487d/extract-utilities/0.log" Feb 03 07:42:19 crc kubenswrapper[4708]: I0203 07:42:19.896936 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-zkk2w_f881ea5f-3c53-4524-8999-6ecbfaf5dfef/registry-server/0.log" Feb 03 07:42:20 crc kubenswrapper[4708]: I0203 07:42:20.041209 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zh966_9a265481-0560-4311-8886-1e3a833e487d/extract-utilities/0.log" Feb 03 07:42:20 crc kubenswrapper[4708]: I0203 07:42:20.061021 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zh966_9a265481-0560-4311-8886-1e3a833e487d/extract-content/0.log" Feb 03 07:42:20 crc kubenswrapper[4708]: I0203 07:42:20.117995 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zh966_9a265481-0560-4311-8886-1e3a833e487d/extract-content/0.log" Feb 03 07:42:20 crc kubenswrapper[4708]: I0203 07:42:20.296371 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zh966_9a265481-0560-4311-8886-1e3a833e487d/extract-utilities/0.log" Feb 03 07:42:20 crc kubenswrapper[4708]: I0203 07:42:20.365375 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zh966_9a265481-0560-4311-8886-1e3a833e487d/extract-content/0.log" Feb 03 07:42:20 crc kubenswrapper[4708]: I0203 07:42:20.366115 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zh966_9a265481-0560-4311-8886-1e3a833e487d/registry-server/0.log" Feb 03 07:42:20 crc kubenswrapper[4708]: I0203 07:42:20.502039 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rpf9t_26447fe3-af8b-43e8-8aa8-e2e29f5639c1/extract-utilities/0.log" Feb 03 07:42:20 crc kubenswrapper[4708]: I0203 07:42:20.671249 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rpf9t_26447fe3-af8b-43e8-8aa8-e2e29f5639c1/extract-content/0.log" Feb 03 07:42:20 crc kubenswrapper[4708]: I0203 07:42:20.671502 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rpf9t_26447fe3-af8b-43e8-8aa8-e2e29f5639c1/extract-content/0.log" Feb 03 07:42:20 crc kubenswrapper[4708]: I0203 07:42:20.702090 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rpf9t_26447fe3-af8b-43e8-8aa8-e2e29f5639c1/extract-utilities/0.log" Feb 03 07:42:20 crc kubenswrapper[4708]: I0203 07:42:20.864272 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rpf9t_26447fe3-af8b-43e8-8aa8-e2e29f5639c1/extract-utilities/0.log" Feb 03 07:42:20 crc kubenswrapper[4708]: 
I0203 07:42:20.895599 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rpf9t_26447fe3-af8b-43e8-8aa8-e2e29f5639c1/extract-content/0.log" Feb 03 07:42:21 crc kubenswrapper[4708]: I0203 07:42:21.261748 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rpf9t_26447fe3-af8b-43e8-8aa8-e2e29f5639c1/registry-server/0.log" Feb 03 07:42:31 crc kubenswrapper[4708]: I0203 07:42:31.092925 4708 scope.go:117] "RemoveContainer" containerID="1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5" Feb 03 07:42:31 crc kubenswrapper[4708]: E0203 07:42:31.093555 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:42:45 crc kubenswrapper[4708]: I0203 07:42:45.095080 4708 scope.go:117] "RemoveContainer" containerID="1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5" Feb 03 07:42:45 crc kubenswrapper[4708]: E0203 07:42:45.095976 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:42:57 crc kubenswrapper[4708]: I0203 07:42:57.093284 4708 scope.go:117] "RemoveContainer" containerID="1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5" Feb 03 07:42:57 crc kubenswrapper[4708]: E0203 07:42:57.094218 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:43:08 crc kubenswrapper[4708]: I0203 07:43:08.093614 4708 scope.go:117] "RemoveContainer" containerID="1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5" Feb 03 07:43:08 crc kubenswrapper[4708]: E0203 07:43:08.094525 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:43:20 crc kubenswrapper[4708]: I0203 07:43:20.093009 4708 scope.go:117] "RemoveContainer" containerID="1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5" Feb 03 07:43:20 crc kubenswrapper[4708]: E0203 07:43:20.093838 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:43:32 crc kubenswrapper[4708]: I0203 07:43:32.117639 4708 scope.go:117] "RemoveContainer" containerID="1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5" Feb 03 07:43:33 crc kubenswrapper[4708]: I0203 07:43:33.015075 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" event={"ID":"67498414-5132-496e-9638-189f5941ace0","Type":"ContainerStarted","Data":"e97c1c7a8a378f9a3bb4de9efdba7a03465531ea21b43635da735f593a89eaba"} Feb 03 07:43:56 crc kubenswrapper[4708]: I0203 07:43:56.872607 4708 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-85b58fb76c-jldbq" podUID="7e6d6f02-2176-4c8f-93c4-cb78832fc2d3" containerName="proxy-server" probeResult="failure" output="HTTP probe failed with statuscode: 502" Feb 03 07:44:00 crc kubenswrapper[4708]: I0203 07:44:00.253517 4708 generic.go:334] "Generic (PLEG): container finished" podID="d02e34a2-8944-4d52-bfbf-2d5cccbd0435" containerID="90036df92560178408d0662295a1ac57424b10217653b2f321c4b1cca385bcd1" exitCode=0 Feb 03 07:44:00 crc kubenswrapper[4708]: I0203 07:44:00.253661 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zh6nf/must-gather-lhfm4" event={"ID":"d02e34a2-8944-4d52-bfbf-2d5cccbd0435","Type":"ContainerDied","Data":"90036df92560178408d0662295a1ac57424b10217653b2f321c4b1cca385bcd1"} Feb 03 07:44:00 crc kubenswrapper[4708]: I0203 07:44:00.254607 4708 scope.go:117] "RemoveContainer" containerID="90036df92560178408d0662295a1ac57424b10217653b2f321c4b1cca385bcd1" Feb 03 07:44:00 crc kubenswrapper[4708]: I0203 07:44:00.744251 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-zh6nf_must-gather-lhfm4_d02e34a2-8944-4d52-bfbf-2d5cccbd0435/gather/0.log" Feb 03 07:44:09 crc kubenswrapper[4708]: I0203 07:44:09.111955 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-zh6nf/must-gather-lhfm4"] Feb 03 07:44:09 crc kubenswrapper[4708]: I0203 07:44:09.112933 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-zh6nf/must-gather-lhfm4" podUID="d02e34a2-8944-4d52-bfbf-2d5cccbd0435" containerName="copy" containerID="cri-o://b737fc60629e7895f4ea0a2481cc8b40beab9e9ec242a4e946ea89e9cdab9581" gracePeriod=2 Feb 03 07:44:09 crc kubenswrapper[4708]: I0203 07:44:09.123279 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-zh6nf/must-gather-lhfm4"] Feb 03 07:44:09 crc kubenswrapper[4708]: I0203 07:44:09.341441 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-zh6nf_must-gather-lhfm4_d02e34a2-8944-4d52-bfbf-2d5cccbd0435/copy/0.log" Feb 03 07:44:09 crc kubenswrapper[4708]: I0203 07:44:09.342175 4708 generic.go:334] "Generic (PLEG): container finished" podID="d02e34a2-8944-4d52-bfbf-2d5cccbd0435" containerID="b737fc60629e7895f4ea0a2481cc8b40beab9e9ec242a4e946ea89e9cdab9581" exitCode=143 Feb 03 07:44:09 crc kubenswrapper[4708]: I0203 07:44:09.681201 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-zh6nf_must-gather-lhfm4_d02e34a2-8944-4d52-bfbf-2d5cccbd0435/copy/0.log" Feb 03 07:44:09 crc kubenswrapper[4708]: I0203 07:44:09.682040 4708 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zh6nf/must-gather-lhfm4" Feb 03 07:44:09 crc kubenswrapper[4708]: I0203 07:44:09.861640 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/d02e34a2-8944-4d52-bfbf-2d5cccbd0435-must-gather-output\") pod \"d02e34a2-8944-4d52-bfbf-2d5cccbd0435\" (UID: \"d02e34a2-8944-4d52-bfbf-2d5cccbd0435\") " Feb 03 07:44:09 crc kubenswrapper[4708]: I0203 07:44:09.861759 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qt96g\" (UniqueName: \"kubernetes.io/projected/d02e34a2-8944-4d52-bfbf-2d5cccbd0435-kube-api-access-qt96g\") pod \"d02e34a2-8944-4d52-bfbf-2d5cccbd0435\" (UID: \"d02e34a2-8944-4d52-bfbf-2d5cccbd0435\") " Feb 03 07:44:09 crc kubenswrapper[4708]: I0203 07:44:09.867705 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d02e34a2-8944-4d52-bfbf-2d5cccbd0435-kube-api-access-qt96g" (OuterVolumeSpecName: "kube-api-access-qt96g") pod "d02e34a2-8944-4d52-bfbf-2d5cccbd0435" (UID: "d02e34a2-8944-4d52-bfbf-2d5cccbd0435"). InnerVolumeSpecName "kube-api-access-qt96g". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:44:09 crc kubenswrapper[4708]: I0203 07:44:09.964361 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qt96g\" (UniqueName: \"kubernetes.io/projected/d02e34a2-8944-4d52-bfbf-2d5cccbd0435-kube-api-access-qt96g\") on node \"crc\" DevicePath \"\"" Feb 03 07:44:10 crc kubenswrapper[4708]: I0203 07:44:10.005713 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d02e34a2-8944-4d52-bfbf-2d5cccbd0435-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "d02e34a2-8944-4d52-bfbf-2d5cccbd0435" (UID: "d02e34a2-8944-4d52-bfbf-2d5cccbd0435"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:44:10 crc kubenswrapper[4708]: I0203 07:44:10.066385 4708 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/d02e34a2-8944-4d52-bfbf-2d5cccbd0435-must-gather-output\") on node \"crc\" DevicePath \"\"" Feb 03 07:44:10 crc kubenswrapper[4708]: I0203 07:44:10.105468 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d02e34a2-8944-4d52-bfbf-2d5cccbd0435" path="/var/lib/kubelet/pods/d02e34a2-8944-4d52-bfbf-2d5cccbd0435/volumes" Feb 03 07:44:10 crc kubenswrapper[4708]: I0203 07:44:10.356459 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-zh6nf_must-gather-lhfm4_d02e34a2-8944-4d52-bfbf-2d5cccbd0435/copy/0.log" Feb 03 07:44:10 crc kubenswrapper[4708]: I0203 07:44:10.357288 4708 scope.go:117] "RemoveContainer" containerID="b737fc60629e7895f4ea0a2481cc8b40beab9e9ec242a4e946ea89e9cdab9581" Feb 03 07:44:10 crc kubenswrapper[4708]: I0203 07:44:10.357362 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-zh6nf/must-gather-lhfm4" Feb 03 07:44:10 crc kubenswrapper[4708]: I0203 07:44:10.383575 4708 scope.go:117] "RemoveContainer" containerID="90036df92560178408d0662295a1ac57424b10217653b2f321c4b1cca385bcd1" Feb 03 07:44:56 crc kubenswrapper[4708]: I0203 07:44:56.833518 4708 scope.go:117] "RemoveContainer" containerID="917ccb593c76edb655ee4b4173185df02e9461312a099c179434e628b8f609ad" Feb 03 07:45:00 crc kubenswrapper[4708]: I0203 07:45:00.144998 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501745-cn4ng"] Feb 03 07:45:00 crc kubenswrapper[4708]: E0203 07:45:00.145743 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b3634f6-5fc6-4481-9fe4-e554ad34fe98" containerName="container-00" Feb 03 07:45:00 crc kubenswrapper[4708]: I0203 07:45:00.145761 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b3634f6-5fc6-4481-9fe4-e554ad34fe98" containerName="container-00" Feb 03 07:45:00 crc kubenswrapper[4708]: E0203 07:45:00.145804 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d02e34a2-8944-4d52-bfbf-2d5cccbd0435" containerName="gather" Feb 03 07:45:00 crc kubenswrapper[4708]: I0203 07:45:00.145810 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="d02e34a2-8944-4d52-bfbf-2d5cccbd0435" containerName="gather" Feb 03 07:45:00 crc kubenswrapper[4708]: E0203 07:45:00.145823 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d02e34a2-8944-4d52-bfbf-2d5cccbd0435" containerName="copy" Feb 03 07:45:00 crc kubenswrapper[4708]: I0203 07:45:00.145830 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="d02e34a2-8944-4d52-bfbf-2d5cccbd0435" containerName="copy" Feb 03 07:45:00 crc kubenswrapper[4708]: I0203 07:45:00.146041 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="d02e34a2-8944-4d52-bfbf-2d5cccbd0435" containerName="copy" Feb 03 07:45:00 crc kubenswrapper[4708]: I0203 07:45:00.146056 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b3634f6-5fc6-4481-9fe4-e554ad34fe98" containerName="container-00" Feb 03 07:45:00 crc kubenswrapper[4708]: I0203 07:45:00.146074 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="d02e34a2-8944-4d52-bfbf-2d5cccbd0435" containerName="gather" Feb 03 07:45:00 crc kubenswrapper[4708]: I0203 07:45:00.146684 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-cn4ng" Feb 03 07:45:00 crc kubenswrapper[4708]: I0203 07:45:00.148645 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 03 07:45:00 crc kubenswrapper[4708]: I0203 07:45:00.148675 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 03 07:45:00 crc kubenswrapper[4708]: I0203 07:45:00.154467 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501745-cn4ng"] Feb 03 07:45:00 crc kubenswrapper[4708]: I0203 07:45:00.308650 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0b5aaf72-3a58-42e8-b733-78ccfd5c1baf-config-volume\") pod \"collect-profiles-29501745-cn4ng\" (UID: \"0b5aaf72-3a58-42e8-b733-78ccfd5c1baf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-cn4ng" Feb 03 07:45:00 crc kubenswrapper[4708]: I0203 07:45:00.308812 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0b5aaf72-3a58-42e8-b733-78ccfd5c1baf-secret-volume\") pod \"collect-profiles-29501745-cn4ng\" (UID: \"0b5aaf72-3a58-42e8-b733-78ccfd5c1baf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-cn4ng" Feb 03 07:45:00 crc kubenswrapper[4708]: I0203 07:45:00.308866 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v45hw\" (UniqueName: \"kubernetes.io/projected/0b5aaf72-3a58-42e8-b733-78ccfd5c1baf-kube-api-access-v45hw\") pod \"collect-profiles-29501745-cn4ng\" (UID: \"0b5aaf72-3a58-42e8-b733-78ccfd5c1baf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-cn4ng" Feb 03 07:45:00 crc kubenswrapper[4708]: I0203 07:45:00.410529 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0b5aaf72-3a58-42e8-b733-78ccfd5c1baf-secret-volume\") pod \"collect-profiles-29501745-cn4ng\" (UID: \"0b5aaf72-3a58-42e8-b733-78ccfd5c1baf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-cn4ng" Feb 03 07:45:00 crc kubenswrapper[4708]: I0203 07:45:00.410600 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v45hw\" (UniqueName: \"kubernetes.io/projected/0b5aaf72-3a58-42e8-b733-78ccfd5c1baf-kube-api-access-v45hw\") pod \"collect-profiles-29501745-cn4ng\" (UID: \"0b5aaf72-3a58-42e8-b733-78ccfd5c1baf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-cn4ng" Feb 03 07:45:00 crc kubenswrapper[4708]: I0203 07:45:00.411061 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0b5aaf72-3a58-42e8-b733-78ccfd5c1baf-config-volume\") pod \"collect-profiles-29501745-cn4ng\" (UID: \"0b5aaf72-3a58-42e8-b733-78ccfd5c1baf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-cn4ng" Feb 03 07:45:00 crc kubenswrapper[4708]: I0203 07:45:00.412081 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0b5aaf72-3a58-42e8-b733-78ccfd5c1baf-config-volume\") pod 
\"collect-profiles-29501745-cn4ng\" (UID: \"0b5aaf72-3a58-42e8-b733-78ccfd5c1baf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-cn4ng" Feb 03 07:45:00 crc kubenswrapper[4708]: I0203 07:45:00.416403 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0b5aaf72-3a58-42e8-b733-78ccfd5c1baf-secret-volume\") pod \"collect-profiles-29501745-cn4ng\" (UID: \"0b5aaf72-3a58-42e8-b733-78ccfd5c1baf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-cn4ng" Feb 03 07:45:00 crc kubenswrapper[4708]: I0203 07:45:00.431584 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v45hw\" (UniqueName: \"kubernetes.io/projected/0b5aaf72-3a58-42e8-b733-78ccfd5c1baf-kube-api-access-v45hw\") pod \"collect-profiles-29501745-cn4ng\" (UID: \"0b5aaf72-3a58-42e8-b733-78ccfd5c1baf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-cn4ng" Feb 03 07:45:00 crc kubenswrapper[4708]: I0203 07:45:00.483120 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-cn4ng" Feb 03 07:45:00 crc kubenswrapper[4708]: I0203 07:45:00.933910 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501745-cn4ng"] Feb 03 07:45:01 crc kubenswrapper[4708]: I0203 07:45:01.833082 4708 generic.go:334] "Generic (PLEG): container finished" podID="0b5aaf72-3a58-42e8-b733-78ccfd5c1baf" containerID="7c4a73449a600f17b8555c5e92cd50978b8b613ff91217dc3504b8b748efa481" exitCode=0 Feb 03 07:45:01 crc kubenswrapper[4708]: I0203 07:45:01.833194 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-cn4ng" event={"ID":"0b5aaf72-3a58-42e8-b733-78ccfd5c1baf","Type":"ContainerDied","Data":"7c4a73449a600f17b8555c5e92cd50978b8b613ff91217dc3504b8b748efa481"} Feb 03 07:45:01 crc kubenswrapper[4708]: I0203 07:45:01.834284 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-cn4ng" event={"ID":"0b5aaf72-3a58-42e8-b733-78ccfd5c1baf","Type":"ContainerStarted","Data":"e1999c759428a48bd21997f5e0873a147693792b2a1cb426bf44612967bde409"} Feb 03 07:45:03 crc kubenswrapper[4708]: I0203 07:45:03.155429 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-cn4ng" Feb 03 07:45:03 crc kubenswrapper[4708]: I0203 07:45:03.280990 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v45hw\" (UniqueName: \"kubernetes.io/projected/0b5aaf72-3a58-42e8-b733-78ccfd5c1baf-kube-api-access-v45hw\") pod \"0b5aaf72-3a58-42e8-b733-78ccfd5c1baf\" (UID: \"0b5aaf72-3a58-42e8-b733-78ccfd5c1baf\") " Feb 03 07:45:03 crc kubenswrapper[4708]: I0203 07:45:03.285123 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0b5aaf72-3a58-42e8-b733-78ccfd5c1baf-config-volume\") pod \"0b5aaf72-3a58-42e8-b733-78ccfd5c1baf\" (UID: \"0b5aaf72-3a58-42e8-b733-78ccfd5c1baf\") " Feb 03 07:45:03 crc kubenswrapper[4708]: I0203 07:45:03.285277 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0b5aaf72-3a58-42e8-b733-78ccfd5c1baf-secret-volume\") pod \"0b5aaf72-3a58-42e8-b733-78ccfd5c1baf\" (UID: \"0b5aaf72-3a58-42e8-b733-78ccfd5c1baf\") " Feb 03 07:45:03 crc kubenswrapper[4708]: I0203 07:45:03.285709 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b5aaf72-3a58-42e8-b733-78ccfd5c1baf-config-volume" (OuterVolumeSpecName: "config-volume") pod "0b5aaf72-3a58-42e8-b733-78ccfd5c1baf" (UID: "0b5aaf72-3a58-42e8-b733-78ccfd5c1baf"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 03 07:45:03 crc kubenswrapper[4708]: I0203 07:45:03.286214 4708 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0b5aaf72-3a58-42e8-b733-78ccfd5c1baf-config-volume\") on node \"crc\" DevicePath \"\"" Feb 03 07:45:03 crc kubenswrapper[4708]: I0203 07:45:03.288932 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b5aaf72-3a58-42e8-b733-78ccfd5c1baf-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "0b5aaf72-3a58-42e8-b733-78ccfd5c1baf" (UID: "0b5aaf72-3a58-42e8-b733-78ccfd5c1baf"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 03 07:45:03 crc kubenswrapper[4708]: I0203 07:45:03.293274 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b5aaf72-3a58-42e8-b733-78ccfd5c1baf-kube-api-access-v45hw" (OuterVolumeSpecName: "kube-api-access-v45hw") pod "0b5aaf72-3a58-42e8-b733-78ccfd5c1baf" (UID: "0b5aaf72-3a58-42e8-b733-78ccfd5c1baf"). InnerVolumeSpecName "kube-api-access-v45hw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:45:03 crc kubenswrapper[4708]: I0203 07:45:03.388353 4708 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0b5aaf72-3a58-42e8-b733-78ccfd5c1baf-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 03 07:45:03 crc kubenswrapper[4708]: I0203 07:45:03.388435 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v45hw\" (UniqueName: \"kubernetes.io/projected/0b5aaf72-3a58-42e8-b733-78ccfd5c1baf-kube-api-access-v45hw\") on node \"crc\" DevicePath \"\"" Feb 03 07:45:03 crc kubenswrapper[4708]: I0203 07:45:03.853093 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-cn4ng" event={"ID":"0b5aaf72-3a58-42e8-b733-78ccfd5c1baf","Type":"ContainerDied","Data":"e1999c759428a48bd21997f5e0873a147693792b2a1cb426bf44612967bde409"} Feb 03 07:45:03 crc kubenswrapper[4708]: I0203 07:45:03.853139 4708 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e1999c759428a48bd21997f5e0873a147693792b2a1cb426bf44612967bde409" Feb 03 07:45:03 crc kubenswrapper[4708]: I0203 07:45:03.853197 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501745-cn4ng" Feb 03 07:45:04 crc kubenswrapper[4708]: I0203 07:45:04.225186 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501700-w7xxc"] Feb 03 07:45:04 crc kubenswrapper[4708]: I0203 07:45:04.232415 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501700-w7xxc"] Feb 03 07:45:06 crc kubenswrapper[4708]: I0203 07:45:06.107005 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ead3a61c-4b09-4f98-866a-1e66ed92d084" path="/var/lib/kubelet/pods/ead3a61c-4b09-4f98-866a-1e66ed92d084/volumes" Feb 03 07:45:53 crc kubenswrapper[4708]: I0203 07:45:53.833508 4708 patch_prober.go:28] interesting pod/machine-config-daemon-r94bn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:45:53 crc kubenswrapper[4708]: I0203 07:45:53.835210 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:45:56 crc kubenswrapper[4708]: I0203 07:45:56.887411 4708 scope.go:117] "RemoveContainer" containerID="942332b768abde049c122f3277e1c2843ed1c90c11c281184c9c75022ffb48e5" Feb 03 07:45:56 crc kubenswrapper[4708]: I0203 07:45:56.941624 4708 scope.go:117] "RemoveContainer" containerID="ddaf45b0828c7a5c78e29f018f6fc14b8606828dbbbeb371affb36d71e260dff" Feb 03 07:46:23 crc kubenswrapper[4708]: I0203 07:46:23.833601 4708 patch_prober.go:28] interesting pod/machine-config-daemon-r94bn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:46:23 crc kubenswrapper[4708]: I0203 07:46:23.834344 4708 
prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:46:39 crc kubenswrapper[4708]: I0203 07:46:39.168140 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-g4tw9"] Feb 03 07:46:39 crc kubenswrapper[4708]: E0203 07:46:39.169150 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b5aaf72-3a58-42e8-b733-78ccfd5c1baf" containerName="collect-profiles" Feb 03 07:46:39 crc kubenswrapper[4708]: I0203 07:46:39.169167 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b5aaf72-3a58-42e8-b733-78ccfd5c1baf" containerName="collect-profiles" Feb 03 07:46:39 crc kubenswrapper[4708]: I0203 07:46:39.169426 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b5aaf72-3a58-42e8-b733-78ccfd5c1baf" containerName="collect-profiles" Feb 03 07:46:39 crc kubenswrapper[4708]: I0203 07:46:39.171190 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-g4tw9" Feb 03 07:46:39 crc kubenswrapper[4708]: I0203 07:46:39.192133 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-g4tw9"] Feb 03 07:46:39 crc kubenswrapper[4708]: I0203 07:46:39.271192 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwc97\" (UniqueName: \"kubernetes.io/projected/e0f3321e-cd96-4baf-bff8-8b906113b59f-kube-api-access-lwc97\") pod \"community-operators-g4tw9\" (UID: \"e0f3321e-cd96-4baf-bff8-8b906113b59f\") " pod="openshift-marketplace/community-operators-g4tw9" Feb 03 07:46:39 crc kubenswrapper[4708]: I0203 07:46:39.271422 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0f3321e-cd96-4baf-bff8-8b906113b59f-catalog-content\") pod \"community-operators-g4tw9\" (UID: \"e0f3321e-cd96-4baf-bff8-8b906113b59f\") " pod="openshift-marketplace/community-operators-g4tw9" Feb 03 07:46:39 crc kubenswrapper[4708]: I0203 07:46:39.271496 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0f3321e-cd96-4baf-bff8-8b906113b59f-utilities\") pod \"community-operators-g4tw9\" (UID: \"e0f3321e-cd96-4baf-bff8-8b906113b59f\") " pod="openshift-marketplace/community-operators-g4tw9" Feb 03 07:46:39 crc kubenswrapper[4708]: I0203 07:46:39.373224 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwc97\" (UniqueName: \"kubernetes.io/projected/e0f3321e-cd96-4baf-bff8-8b906113b59f-kube-api-access-lwc97\") pod \"community-operators-g4tw9\" (UID: \"e0f3321e-cd96-4baf-bff8-8b906113b59f\") " pod="openshift-marketplace/community-operators-g4tw9" Feb 03 07:46:39 crc kubenswrapper[4708]: I0203 07:46:39.373368 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0f3321e-cd96-4baf-bff8-8b906113b59f-catalog-content\") pod \"community-operators-g4tw9\" (UID: \"e0f3321e-cd96-4baf-bff8-8b906113b59f\") " pod="openshift-marketplace/community-operators-g4tw9" Feb 03 07:46:39 crc 
kubenswrapper[4708]: I0203 07:46:39.373407 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0f3321e-cd96-4baf-bff8-8b906113b59f-utilities\") pod \"community-operators-g4tw9\" (UID: \"e0f3321e-cd96-4baf-bff8-8b906113b59f\") " pod="openshift-marketplace/community-operators-g4tw9" Feb 03 07:46:39 crc kubenswrapper[4708]: I0203 07:46:39.373840 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0f3321e-cd96-4baf-bff8-8b906113b59f-catalog-content\") pod \"community-operators-g4tw9\" (UID: \"e0f3321e-cd96-4baf-bff8-8b906113b59f\") " pod="openshift-marketplace/community-operators-g4tw9" Feb 03 07:46:39 crc kubenswrapper[4708]: I0203 07:46:39.373954 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0f3321e-cd96-4baf-bff8-8b906113b59f-utilities\") pod \"community-operators-g4tw9\" (UID: \"e0f3321e-cd96-4baf-bff8-8b906113b59f\") " pod="openshift-marketplace/community-operators-g4tw9" Feb 03 07:46:39 crc kubenswrapper[4708]: I0203 07:46:39.402891 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwc97\" (UniqueName: \"kubernetes.io/projected/e0f3321e-cd96-4baf-bff8-8b906113b59f-kube-api-access-lwc97\") pod \"community-operators-g4tw9\" (UID: \"e0f3321e-cd96-4baf-bff8-8b906113b59f\") " pod="openshift-marketplace/community-operators-g4tw9" Feb 03 07:46:39 crc kubenswrapper[4708]: I0203 07:46:39.506553 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-g4tw9" Feb 03 07:46:40 crc kubenswrapper[4708]: I0203 07:46:40.158571 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-g4tw9"] Feb 03 07:46:40 crc kubenswrapper[4708]: I0203 07:46:40.772689 4708 generic.go:334] "Generic (PLEG): container finished" podID="e0f3321e-cd96-4baf-bff8-8b906113b59f" containerID="8b7c0f0cfaf28f5791dd7283e5c3d248669e35c76e2e4076651415c86805d856" exitCode=0 Feb 03 07:46:40 crc kubenswrapper[4708]: I0203 07:46:40.772740 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g4tw9" event={"ID":"e0f3321e-cd96-4baf-bff8-8b906113b59f","Type":"ContainerDied","Data":"8b7c0f0cfaf28f5791dd7283e5c3d248669e35c76e2e4076651415c86805d856"} Feb 03 07:46:40 crc kubenswrapper[4708]: I0203 07:46:40.773055 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g4tw9" event={"ID":"e0f3321e-cd96-4baf-bff8-8b906113b59f","Type":"ContainerStarted","Data":"0fbb1b8fc4a9ac4ce300239ba7fa1c6c8e26cbe0cd91453785cd74ff38553f39"} Feb 03 07:46:40 crc kubenswrapper[4708]: I0203 07:46:40.774957 4708 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 03 07:46:41 crc kubenswrapper[4708]: I0203 07:46:41.787849 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g4tw9" event={"ID":"e0f3321e-cd96-4baf-bff8-8b906113b59f","Type":"ContainerStarted","Data":"d4263af4c364cc3fda8c5dbe483b1c2f527facf1a73cba2745b88530d47784cf"} Feb 03 07:46:42 crc kubenswrapper[4708]: I0203 07:46:42.797311 4708 generic.go:334] "Generic (PLEG): container finished" podID="e0f3321e-cd96-4baf-bff8-8b906113b59f" containerID="d4263af4c364cc3fda8c5dbe483b1c2f527facf1a73cba2745b88530d47784cf" exitCode=0 Feb 03 
07:46:42 crc kubenswrapper[4708]: I0203 07:46:42.797353 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g4tw9" event={"ID":"e0f3321e-cd96-4baf-bff8-8b906113b59f","Type":"ContainerDied","Data":"d4263af4c364cc3fda8c5dbe483b1c2f527facf1a73cba2745b88530d47784cf"} Feb 03 07:46:43 crc kubenswrapper[4708]: I0203 07:46:43.477668 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-jqhrb/must-gather-xxf6t"] Feb 03 07:46:43 crc kubenswrapper[4708]: I0203 07:46:43.479975 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jqhrb/must-gather-xxf6t" Feb 03 07:46:43 crc kubenswrapper[4708]: I0203 07:46:43.484618 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-jqhrb"/"openshift-service-ca.crt" Feb 03 07:46:43 crc kubenswrapper[4708]: I0203 07:46:43.485245 4708 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-jqhrb"/"kube-root-ca.crt" Feb 03 07:46:43 crc kubenswrapper[4708]: I0203 07:46:43.494725 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-jqhrb/must-gather-xxf6t"] Feb 03 07:46:43 crc kubenswrapper[4708]: I0203 07:46:43.556919 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwrc8\" (UniqueName: \"kubernetes.io/projected/c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9-kube-api-access-lwrc8\") pod \"must-gather-xxf6t\" (UID: \"c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9\") " pod="openshift-must-gather-jqhrb/must-gather-xxf6t" Feb 03 07:46:43 crc kubenswrapper[4708]: I0203 07:46:43.557149 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9-must-gather-output\") pod \"must-gather-xxf6t\" (UID: \"c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9\") " pod="openshift-must-gather-jqhrb/must-gather-xxf6t" Feb 03 07:46:43 crc kubenswrapper[4708]: I0203 07:46:43.659699 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwrc8\" (UniqueName: \"kubernetes.io/projected/c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9-kube-api-access-lwrc8\") pod \"must-gather-xxf6t\" (UID: \"c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9\") " pod="openshift-must-gather-jqhrb/must-gather-xxf6t" Feb 03 07:46:43 crc kubenswrapper[4708]: I0203 07:46:43.659789 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9-must-gather-output\") pod \"must-gather-xxf6t\" (UID: \"c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9\") " pod="openshift-must-gather-jqhrb/must-gather-xxf6t" Feb 03 07:46:43 crc kubenswrapper[4708]: I0203 07:46:43.660233 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9-must-gather-output\") pod \"must-gather-xxf6t\" (UID: \"c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9\") " pod="openshift-must-gather-jqhrb/must-gather-xxf6t" Feb 03 07:46:43 crc kubenswrapper[4708]: I0203 07:46:43.696213 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwrc8\" (UniqueName: \"kubernetes.io/projected/c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9-kube-api-access-lwrc8\") pod \"must-gather-xxf6t\" (UID: 
\"c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9\") " pod="openshift-must-gather-jqhrb/must-gather-xxf6t" Feb 03 07:46:43 crc kubenswrapper[4708]: I0203 07:46:43.801714 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jqhrb/must-gather-xxf6t" Feb 03 07:46:43 crc kubenswrapper[4708]: I0203 07:46:43.808984 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g4tw9" event={"ID":"e0f3321e-cd96-4baf-bff8-8b906113b59f","Type":"ContainerStarted","Data":"2dfa6f6cf4849495dcce97a9e052dcd650b3c8c3b40b2645ce22618b815f8d86"} Feb 03 07:46:43 crc kubenswrapper[4708]: I0203 07:46:43.826989 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-g4tw9" podStartSLOduration=2.415224647 podStartE2EDuration="4.826970907s" podCreationTimestamp="2026-02-03 07:46:39 +0000 UTC" firstStartedPulling="2026-02-03 07:46:40.774700905 +0000 UTC m=+2179.756647712" lastFinishedPulling="2026-02-03 07:46:43.186447125 +0000 UTC m=+2182.168393972" observedRunningTime="2026-02-03 07:46:43.826775902 +0000 UTC m=+2182.808722709" watchObservedRunningTime="2026-02-03 07:46:43.826970907 +0000 UTC m=+2182.808917714" Feb 03 07:46:44 crc kubenswrapper[4708]: I0203 07:46:44.261200 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-jqhrb/must-gather-xxf6t"] Feb 03 07:46:44 crc kubenswrapper[4708]: I0203 07:46:44.822873 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jqhrb/must-gather-xxf6t" event={"ID":"c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9","Type":"ContainerStarted","Data":"28be62fbfa4581d87a83c58127014f10c5e264869452d57087d935d92fc40391"} Feb 03 07:46:44 crc kubenswrapper[4708]: I0203 07:46:44.823877 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jqhrb/must-gather-xxf6t" event={"ID":"c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9","Type":"ContainerStarted","Data":"8d806bb36d4c930d67af2e13bcc70c45896c8702ae579367aa1ac935b34d27f1"} Feb 03 07:46:44 crc kubenswrapper[4708]: I0203 07:46:44.823899 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jqhrb/must-gather-xxf6t" event={"ID":"c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9","Type":"ContainerStarted","Data":"35604d1467758bb287afd4370522e58a3380618a6f1eb8f9e71af47b3784f954"} Feb 03 07:46:45 crc kubenswrapper[4708]: I0203 07:46:45.854984 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-jqhrb/must-gather-xxf6t" podStartSLOduration=2.8549538439999997 podStartE2EDuration="2.854953844s" podCreationTimestamp="2026-02-03 07:46:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:46:45.847582925 +0000 UTC m=+2184.829529752" watchObservedRunningTime="2026-02-03 07:46:45.854953844 +0000 UTC m=+2184.836900661" Feb 03 07:46:48 crc kubenswrapper[4708]: I0203 07:46:48.319030 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-jqhrb/crc-debug-xb8wd"] Feb 03 07:46:48 crc kubenswrapper[4708]: I0203 07:46:48.321096 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jqhrb/crc-debug-xb8wd" Feb 03 07:46:48 crc kubenswrapper[4708]: I0203 07:46:48.326527 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-jqhrb"/"default-dockercfg-zxjdm" Feb 03 07:46:48 crc kubenswrapper[4708]: I0203 07:46:48.450298 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8980e7be-a924-4608-84ba-425fed6b4a42-host\") pod \"crc-debug-xb8wd\" (UID: \"8980e7be-a924-4608-84ba-425fed6b4a42\") " pod="openshift-must-gather-jqhrb/crc-debug-xb8wd" Feb 03 07:46:48 crc kubenswrapper[4708]: I0203 07:46:48.450721 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmxbp\" (UniqueName: \"kubernetes.io/projected/8980e7be-a924-4608-84ba-425fed6b4a42-kube-api-access-pmxbp\") pod \"crc-debug-xb8wd\" (UID: \"8980e7be-a924-4608-84ba-425fed6b4a42\") " pod="openshift-must-gather-jqhrb/crc-debug-xb8wd" Feb 03 07:46:48 crc kubenswrapper[4708]: I0203 07:46:48.552335 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmxbp\" (UniqueName: \"kubernetes.io/projected/8980e7be-a924-4608-84ba-425fed6b4a42-kube-api-access-pmxbp\") pod \"crc-debug-xb8wd\" (UID: \"8980e7be-a924-4608-84ba-425fed6b4a42\") " pod="openshift-must-gather-jqhrb/crc-debug-xb8wd" Feb 03 07:46:48 crc kubenswrapper[4708]: I0203 07:46:48.552481 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8980e7be-a924-4608-84ba-425fed6b4a42-host\") pod \"crc-debug-xb8wd\" (UID: \"8980e7be-a924-4608-84ba-425fed6b4a42\") " pod="openshift-must-gather-jqhrb/crc-debug-xb8wd" Feb 03 07:46:48 crc kubenswrapper[4708]: I0203 07:46:48.552599 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8980e7be-a924-4608-84ba-425fed6b4a42-host\") pod \"crc-debug-xb8wd\" (UID: \"8980e7be-a924-4608-84ba-425fed6b4a42\") " pod="openshift-must-gather-jqhrb/crc-debug-xb8wd" Feb 03 07:46:48 crc kubenswrapper[4708]: I0203 07:46:48.578469 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmxbp\" (UniqueName: \"kubernetes.io/projected/8980e7be-a924-4608-84ba-425fed6b4a42-kube-api-access-pmxbp\") pod \"crc-debug-xb8wd\" (UID: \"8980e7be-a924-4608-84ba-425fed6b4a42\") " pod="openshift-must-gather-jqhrb/crc-debug-xb8wd" Feb 03 07:46:48 crc kubenswrapper[4708]: I0203 07:46:48.644308 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jqhrb/crc-debug-xb8wd" Feb 03 07:46:48 crc kubenswrapper[4708]: W0203 07:46:48.681014 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8980e7be_a924_4608_84ba_425fed6b4a42.slice/crio-3c13926968a167e1a3df155afa5cae63f6feb5513f6723ee86855b84aeeca6f1 WatchSource:0}: Error finding container 3c13926968a167e1a3df155afa5cae63f6feb5513f6723ee86855b84aeeca6f1: Status 404 returned error can't find the container with id 3c13926968a167e1a3df155afa5cae63f6feb5513f6723ee86855b84aeeca6f1 Feb 03 07:46:48 crc kubenswrapper[4708]: I0203 07:46:48.855234 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jqhrb/crc-debug-xb8wd" event={"ID":"8980e7be-a924-4608-84ba-425fed6b4a42","Type":"ContainerStarted","Data":"3c13926968a167e1a3df155afa5cae63f6feb5513f6723ee86855b84aeeca6f1"} Feb 03 07:46:49 crc kubenswrapper[4708]: I0203 07:46:49.507514 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-g4tw9" Feb 03 07:46:49 crc kubenswrapper[4708]: I0203 07:46:49.507806 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-g4tw9" Feb 03 07:46:49 crc kubenswrapper[4708]: I0203 07:46:49.561775 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-g4tw9" Feb 03 07:46:49 crc kubenswrapper[4708]: I0203 07:46:49.864617 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jqhrb/crc-debug-xb8wd" event={"ID":"8980e7be-a924-4608-84ba-425fed6b4a42","Type":"ContainerStarted","Data":"e882384096afc84bd7d35225929d342e6f35517f7227f998f3bf7c128c3b66e2"} Feb 03 07:46:49 crc kubenswrapper[4708]: I0203 07:46:49.884012 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-jqhrb/crc-debug-xb8wd" podStartSLOduration=1.8839890339999998 podStartE2EDuration="1.883989034s" podCreationTimestamp="2026-02-03 07:46:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-03 07:46:49.878697535 +0000 UTC m=+2188.860644362" watchObservedRunningTime="2026-02-03 07:46:49.883989034 +0000 UTC m=+2188.865935851" Feb 03 07:46:49 crc kubenswrapper[4708]: I0203 07:46:49.921342 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-g4tw9" Feb 03 07:46:49 crc kubenswrapper[4708]: I0203 07:46:49.975571 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-g4tw9"] Feb 03 07:46:51 crc kubenswrapper[4708]: I0203 07:46:51.881189 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-g4tw9" podUID="e0f3321e-cd96-4baf-bff8-8b906113b59f" containerName="registry-server" containerID="cri-o://2dfa6f6cf4849495dcce97a9e052dcd650b3c8c3b40b2645ce22618b815f8d86" gracePeriod=2 Feb 03 07:46:52 crc kubenswrapper[4708]: I0203 07:46:52.478311 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-g4tw9" Feb 03 07:46:52 crc kubenswrapper[4708]: I0203 07:46:52.557934 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0f3321e-cd96-4baf-bff8-8b906113b59f-utilities\") pod \"e0f3321e-cd96-4baf-bff8-8b906113b59f\" (UID: \"e0f3321e-cd96-4baf-bff8-8b906113b59f\") " Feb 03 07:46:52 crc kubenswrapper[4708]: I0203 07:46:52.558431 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lwc97\" (UniqueName: \"kubernetes.io/projected/e0f3321e-cd96-4baf-bff8-8b906113b59f-kube-api-access-lwc97\") pod \"e0f3321e-cd96-4baf-bff8-8b906113b59f\" (UID: \"e0f3321e-cd96-4baf-bff8-8b906113b59f\") " Feb 03 07:46:52 crc kubenswrapper[4708]: I0203 07:46:52.558580 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0f3321e-cd96-4baf-bff8-8b906113b59f-catalog-content\") pod \"e0f3321e-cd96-4baf-bff8-8b906113b59f\" (UID: \"e0f3321e-cd96-4baf-bff8-8b906113b59f\") " Feb 03 07:46:52 crc kubenswrapper[4708]: I0203 07:46:52.558695 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0f3321e-cd96-4baf-bff8-8b906113b59f-utilities" (OuterVolumeSpecName: "utilities") pod "e0f3321e-cd96-4baf-bff8-8b906113b59f" (UID: "e0f3321e-cd96-4baf-bff8-8b906113b59f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:46:52 crc kubenswrapper[4708]: I0203 07:46:52.564588 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0f3321e-cd96-4baf-bff8-8b906113b59f-kube-api-access-lwc97" (OuterVolumeSpecName: "kube-api-access-lwc97") pod "e0f3321e-cd96-4baf-bff8-8b906113b59f" (UID: "e0f3321e-cd96-4baf-bff8-8b906113b59f"). InnerVolumeSpecName "kube-api-access-lwc97". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:46:52 crc kubenswrapper[4708]: I0203 07:46:52.583559 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lwc97\" (UniqueName: \"kubernetes.io/projected/e0f3321e-cd96-4baf-bff8-8b906113b59f-kube-api-access-lwc97\") on node \"crc\" DevicePath \"\"" Feb 03 07:46:52 crc kubenswrapper[4708]: I0203 07:46:52.583629 4708 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0f3321e-cd96-4baf-bff8-8b906113b59f-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:46:52 crc kubenswrapper[4708]: I0203 07:46:52.891340 4708 generic.go:334] "Generic (PLEG): container finished" podID="e0f3321e-cd96-4baf-bff8-8b906113b59f" containerID="2dfa6f6cf4849495dcce97a9e052dcd650b3c8c3b40b2645ce22618b815f8d86" exitCode=0 Feb 03 07:46:52 crc kubenswrapper[4708]: I0203 07:46:52.891387 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g4tw9" event={"ID":"e0f3321e-cd96-4baf-bff8-8b906113b59f","Type":"ContainerDied","Data":"2dfa6f6cf4849495dcce97a9e052dcd650b3c8c3b40b2645ce22618b815f8d86"} Feb 03 07:46:52 crc kubenswrapper[4708]: I0203 07:46:52.891419 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g4tw9" event={"ID":"e0f3321e-cd96-4baf-bff8-8b906113b59f","Type":"ContainerDied","Data":"0fbb1b8fc4a9ac4ce300239ba7fa1c6c8e26cbe0cd91453785cd74ff38553f39"} Feb 03 07:46:52 crc kubenswrapper[4708]: I0203 07:46:52.891444 4708 scope.go:117] "RemoveContainer" containerID="2dfa6f6cf4849495dcce97a9e052dcd650b3c8c3b40b2645ce22618b815f8d86" Feb 03 07:46:52 crc kubenswrapper[4708]: I0203 07:46:52.891392 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-g4tw9" Feb 03 07:46:52 crc kubenswrapper[4708]: I0203 07:46:52.923639 4708 scope.go:117] "RemoveContainer" containerID="d4263af4c364cc3fda8c5dbe483b1c2f527facf1a73cba2745b88530d47784cf" Feb 03 07:46:52 crc kubenswrapper[4708]: I0203 07:46:52.979188 4708 scope.go:117] "RemoveContainer" containerID="8b7c0f0cfaf28f5791dd7283e5c3d248669e35c76e2e4076651415c86805d856" Feb 03 07:46:53 crc kubenswrapper[4708]: I0203 07:46:53.010511 4708 scope.go:117] "RemoveContainer" containerID="2dfa6f6cf4849495dcce97a9e052dcd650b3c8c3b40b2645ce22618b815f8d86" Feb 03 07:46:53 crc kubenswrapper[4708]: E0203 07:46:53.010941 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2dfa6f6cf4849495dcce97a9e052dcd650b3c8c3b40b2645ce22618b815f8d86\": container with ID starting with 2dfa6f6cf4849495dcce97a9e052dcd650b3c8c3b40b2645ce22618b815f8d86 not found: ID does not exist" containerID="2dfa6f6cf4849495dcce97a9e052dcd650b3c8c3b40b2645ce22618b815f8d86" Feb 03 07:46:53 crc kubenswrapper[4708]: I0203 07:46:53.010991 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2dfa6f6cf4849495dcce97a9e052dcd650b3c8c3b40b2645ce22618b815f8d86"} err="failed to get container status \"2dfa6f6cf4849495dcce97a9e052dcd650b3c8c3b40b2645ce22618b815f8d86\": rpc error: code = NotFound desc = could not find container \"2dfa6f6cf4849495dcce97a9e052dcd650b3c8c3b40b2645ce22618b815f8d86\": container with ID starting with 2dfa6f6cf4849495dcce97a9e052dcd650b3c8c3b40b2645ce22618b815f8d86 not found: ID does not exist" Feb 03 07:46:53 crc kubenswrapper[4708]: I0203 07:46:53.011020 4708 scope.go:117] "RemoveContainer" containerID="d4263af4c364cc3fda8c5dbe483b1c2f527facf1a73cba2745b88530d47784cf" Feb 03 07:46:53 crc kubenswrapper[4708]: E0203 07:46:53.011367 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4263af4c364cc3fda8c5dbe483b1c2f527facf1a73cba2745b88530d47784cf\": container with ID starting with d4263af4c364cc3fda8c5dbe483b1c2f527facf1a73cba2745b88530d47784cf not found: ID does not exist" containerID="d4263af4c364cc3fda8c5dbe483b1c2f527facf1a73cba2745b88530d47784cf" Feb 03 07:46:53 crc kubenswrapper[4708]: I0203 07:46:53.011396 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4263af4c364cc3fda8c5dbe483b1c2f527facf1a73cba2745b88530d47784cf"} err="failed to get container status \"d4263af4c364cc3fda8c5dbe483b1c2f527facf1a73cba2745b88530d47784cf\": rpc error: code = NotFound desc = could not find container \"d4263af4c364cc3fda8c5dbe483b1c2f527facf1a73cba2745b88530d47784cf\": container with ID starting with d4263af4c364cc3fda8c5dbe483b1c2f527facf1a73cba2745b88530d47784cf not found: ID does not exist" Feb 03 07:46:53 crc kubenswrapper[4708]: I0203 07:46:53.011417 4708 scope.go:117] "RemoveContainer" containerID="8b7c0f0cfaf28f5791dd7283e5c3d248669e35c76e2e4076651415c86805d856" Feb 03 07:46:53 crc kubenswrapper[4708]: E0203 07:46:53.011903 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b7c0f0cfaf28f5791dd7283e5c3d248669e35c76e2e4076651415c86805d856\": container with ID starting with 8b7c0f0cfaf28f5791dd7283e5c3d248669e35c76e2e4076651415c86805d856 not found: ID does not exist" containerID="8b7c0f0cfaf28f5791dd7283e5c3d248669e35c76e2e4076651415c86805d856" 
Feb 03 07:46:53 crc kubenswrapper[4708]: I0203 07:46:53.011943 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b7c0f0cfaf28f5791dd7283e5c3d248669e35c76e2e4076651415c86805d856"} err="failed to get container status \"8b7c0f0cfaf28f5791dd7283e5c3d248669e35c76e2e4076651415c86805d856\": rpc error: code = NotFound desc = could not find container \"8b7c0f0cfaf28f5791dd7283e5c3d248669e35c76e2e4076651415c86805d856\": container with ID starting with 8b7c0f0cfaf28f5791dd7283e5c3d248669e35c76e2e4076651415c86805d856 not found: ID does not exist"
Feb 03 07:46:53 crc kubenswrapper[4708]: I0203 07:46:53.322916 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0f3321e-cd96-4baf-bff8-8b906113b59f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e0f3321e-cd96-4baf-bff8-8b906113b59f" (UID: "e0f3321e-cd96-4baf-bff8-8b906113b59f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 03 07:46:53 crc kubenswrapper[4708]: I0203 07:46:53.398508 4708 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0f3321e-cd96-4baf-bff8-8b906113b59f-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 03 07:46:53 crc kubenswrapper[4708]: I0203 07:46:53.537009 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-g4tw9"]
Feb 03 07:46:53 crc kubenswrapper[4708]: I0203 07:46:53.548265 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-g4tw9"]
Feb 03 07:46:53 crc kubenswrapper[4708]: I0203 07:46:53.833151 4708 patch_prober.go:28] interesting pod/machine-config-daemon-r94bn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 03 07:46:53 crc kubenswrapper[4708]: I0203 07:46:53.834241 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 03 07:46:53 crc kubenswrapper[4708]: I0203 07:46:53.834300 4708 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-r94bn"
Feb 03 07:46:53 crc kubenswrapper[4708]: I0203 07:46:53.835161 4708 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e97c1c7a8a378f9a3bb4de9efdba7a03465531ea21b43635da735f593a89eaba"} pod="openshift-machine-config-operator/machine-config-daemon-r94bn" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Feb 03 07:46:53 crc kubenswrapper[4708]: I0203 07:46:53.835228 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" containerID="cri-o://e97c1c7a8a378f9a3bb4de9efdba7a03465531ea21b43635da735f593a89eaba" gracePeriod=600
Feb 03 07:46:54 crc kubenswrapper[4708]: I0203 07:46:54.103718 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0f3321e-cd96-4baf-bff8-8b906113b59f" path="/var/lib/kubelet/pods/e0f3321e-cd96-4baf-bff8-8b906113b59f/volumes"
Feb 03 07:46:54 crc kubenswrapper[4708]: I0203 07:46:54.911915 4708 generic.go:334] "Generic (PLEG): container finished" podID="67498414-5132-496e-9638-189f5941ace0" containerID="e97c1c7a8a378f9a3bb4de9efdba7a03465531ea21b43635da735f593a89eaba" exitCode=0
Feb 03 07:46:54 crc kubenswrapper[4708]: I0203 07:46:54.911976 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" event={"ID":"67498414-5132-496e-9638-189f5941ace0","Type":"ContainerDied","Data":"e97c1c7a8a378f9a3bb4de9efdba7a03465531ea21b43635da735f593a89eaba"}
Feb 03 07:46:54 crc kubenswrapper[4708]: I0203 07:46:54.912359 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" event={"ID":"67498414-5132-496e-9638-189f5941ace0","Type":"ContainerStarted","Data":"7b0b9b8600998b896bb08a1c5daf1fca59cda92e0baecdb8932c8ddaa8aab8b5"}
Feb 03 07:46:54 crc kubenswrapper[4708]: I0203 07:46:54.912376 4708 scope.go:117] "RemoveContainer" containerID="1ba1fa95bcd76576835f1adc4897dcd5bfdd17c0bca82d12db1122a16e0a38a5"
Feb 03 07:47:17 crc kubenswrapper[4708]: I0203 07:47:17.429222 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-l4v92"]
Feb 03 07:47:17 crc kubenswrapper[4708]: E0203 07:47:17.431606 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0f3321e-cd96-4baf-bff8-8b906113b59f" containerName="extract-content"
Feb 03 07:47:17 crc kubenswrapper[4708]: I0203 07:47:17.433539 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0f3321e-cd96-4baf-bff8-8b906113b59f" containerName="extract-content"
Feb 03 07:47:17 crc kubenswrapper[4708]: E0203 07:47:17.433654 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0f3321e-cd96-4baf-bff8-8b906113b59f" containerName="extract-utilities"
Feb 03 07:47:17 crc kubenswrapper[4708]: I0203 07:47:17.433712 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0f3321e-cd96-4baf-bff8-8b906113b59f" containerName="extract-utilities"
Feb 03 07:47:17 crc kubenswrapper[4708]: E0203 07:47:17.433772 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0f3321e-cd96-4baf-bff8-8b906113b59f" containerName="registry-server"
Feb 03 07:47:17 crc kubenswrapper[4708]: I0203 07:47:17.433843 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0f3321e-cd96-4baf-bff8-8b906113b59f" containerName="registry-server"
Feb 03 07:47:17 crc kubenswrapper[4708]: I0203 07:47:17.434225 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0f3321e-cd96-4baf-bff8-8b906113b59f" containerName="registry-server"
Feb 03 07:47:17 crc kubenswrapper[4708]: I0203 07:47:17.435592 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l4v92"
Feb 03 07:47:17 crc kubenswrapper[4708]: I0203 07:47:17.443455 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-l4v92"]
Feb 03 07:47:17 crc kubenswrapper[4708]: I0203 07:47:17.574140 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4x8r\" (UniqueName: \"kubernetes.io/projected/503d00e6-ef42-48c0-8567-406b18044025-kube-api-access-x4x8r\") pod \"redhat-marketplace-l4v92\" (UID: \"503d00e6-ef42-48c0-8567-406b18044025\") " pod="openshift-marketplace/redhat-marketplace-l4v92"
Feb 03 07:47:17 crc kubenswrapper[4708]: I0203 07:47:17.574223 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/503d00e6-ef42-48c0-8567-406b18044025-utilities\") pod \"redhat-marketplace-l4v92\" (UID: \"503d00e6-ef42-48c0-8567-406b18044025\") " pod="openshift-marketplace/redhat-marketplace-l4v92"
Feb 03 07:47:17 crc kubenswrapper[4708]: I0203 07:47:17.574595 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/503d00e6-ef42-48c0-8567-406b18044025-catalog-content\") pod \"redhat-marketplace-l4v92\" (UID: \"503d00e6-ef42-48c0-8567-406b18044025\") " pod="openshift-marketplace/redhat-marketplace-l4v92"
Feb 03 07:47:17 crc kubenswrapper[4708]: I0203 07:47:17.676685 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/503d00e6-ef42-48c0-8567-406b18044025-catalog-content\") pod \"redhat-marketplace-l4v92\" (UID: \"503d00e6-ef42-48c0-8567-406b18044025\") " pod="openshift-marketplace/redhat-marketplace-l4v92"
Feb 03 07:47:17 crc kubenswrapper[4708]: I0203 07:47:17.676822 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4x8r\" (UniqueName: \"kubernetes.io/projected/503d00e6-ef42-48c0-8567-406b18044025-kube-api-access-x4x8r\") pod \"redhat-marketplace-l4v92\" (UID: \"503d00e6-ef42-48c0-8567-406b18044025\") " pod="openshift-marketplace/redhat-marketplace-l4v92"
Feb 03 07:47:17 crc kubenswrapper[4708]: I0203 07:47:17.676863 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/503d00e6-ef42-48c0-8567-406b18044025-utilities\") pod \"redhat-marketplace-l4v92\" (UID: \"503d00e6-ef42-48c0-8567-406b18044025\") " pod="openshift-marketplace/redhat-marketplace-l4v92"
Feb 03 07:47:17 crc kubenswrapper[4708]: I0203 07:47:17.677283 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/503d00e6-ef42-48c0-8567-406b18044025-catalog-content\") pod \"redhat-marketplace-l4v92\" (UID: \"503d00e6-ef42-48c0-8567-406b18044025\") " pod="openshift-marketplace/redhat-marketplace-l4v92"
Feb 03 07:47:17 crc kubenswrapper[4708]: I0203 07:47:17.677296 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/503d00e6-ef42-48c0-8567-406b18044025-utilities\") pod \"redhat-marketplace-l4v92\" (UID: \"503d00e6-ef42-48c0-8567-406b18044025\") " pod="openshift-marketplace/redhat-marketplace-l4v92"
Feb 03 07:47:17 crc kubenswrapper[4708]: I0203 07:47:17.715039 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4x8r\" (UniqueName: \"kubernetes.io/projected/503d00e6-ef42-48c0-8567-406b18044025-kube-api-access-x4x8r\") pod \"redhat-marketplace-l4v92\" (UID: \"503d00e6-ef42-48c0-8567-406b18044025\") " pod="openshift-marketplace/redhat-marketplace-l4v92"
Feb 03 07:47:17 crc kubenswrapper[4708]: I0203 07:47:17.755405 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l4v92"
Feb 03 07:47:18 crc kubenswrapper[4708]: I0203 07:47:18.417254 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-l4v92"]
Feb 03 07:47:18 crc kubenswrapper[4708]: I0203 07:47:18.819660 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-76pxr"]
Feb 03 07:47:18 crc kubenswrapper[4708]: I0203 07:47:18.822336 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-76pxr"
Feb 03 07:47:18 crc kubenswrapper[4708]: I0203 07:47:18.833528 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-76pxr"]
Feb 03 07:47:19 crc kubenswrapper[4708]: I0203 07:47:19.013487 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3bf673e8-0f8a-4bc8-8c9c-cb875b353142-catalog-content\") pod \"redhat-operators-76pxr\" (UID: \"3bf673e8-0f8a-4bc8-8c9c-cb875b353142\") " pod="openshift-marketplace/redhat-operators-76pxr"
Feb 03 07:47:19 crc kubenswrapper[4708]: I0203 07:47:19.013526 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sl7cw\" (UniqueName: \"kubernetes.io/projected/3bf673e8-0f8a-4bc8-8c9c-cb875b353142-kube-api-access-sl7cw\") pod \"redhat-operators-76pxr\" (UID: \"3bf673e8-0f8a-4bc8-8c9c-cb875b353142\") " pod="openshift-marketplace/redhat-operators-76pxr"
Feb 03 07:47:19 crc kubenswrapper[4708]: I0203 07:47:19.013593 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3bf673e8-0f8a-4bc8-8c9c-cb875b353142-utilities\") pod \"redhat-operators-76pxr\" (UID: \"3bf673e8-0f8a-4bc8-8c9c-cb875b353142\") " pod="openshift-marketplace/redhat-operators-76pxr"
Feb 03 07:47:19 crc kubenswrapper[4708]: I0203 07:47:19.115100 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3bf673e8-0f8a-4bc8-8c9c-cb875b353142-catalog-content\") pod \"redhat-operators-76pxr\" (UID: \"3bf673e8-0f8a-4bc8-8c9c-cb875b353142\") " pod="openshift-marketplace/redhat-operators-76pxr"
Feb 03 07:47:19 crc kubenswrapper[4708]: I0203 07:47:19.115148 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sl7cw\" (UniqueName: \"kubernetes.io/projected/3bf673e8-0f8a-4bc8-8c9c-cb875b353142-kube-api-access-sl7cw\") pod \"redhat-operators-76pxr\" (UID: \"3bf673e8-0f8a-4bc8-8c9c-cb875b353142\") " pod="openshift-marketplace/redhat-operators-76pxr"
Feb 03 07:47:19 crc kubenswrapper[4708]: I0203 07:47:19.115246 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3bf673e8-0f8a-4bc8-8c9c-cb875b353142-utilities\") pod \"redhat-operators-76pxr\" (UID: \"3bf673e8-0f8a-4bc8-8c9c-cb875b353142\") " pod="openshift-marketplace/redhat-operators-76pxr"
Feb 03 07:47:19 crc kubenswrapper[4708]: I0203 07:47:19.115927 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3bf673e8-0f8a-4bc8-8c9c-cb875b353142-utilities\") pod \"redhat-operators-76pxr\" (UID: \"3bf673e8-0f8a-4bc8-8c9c-cb875b353142\") " pod="openshift-marketplace/redhat-operators-76pxr"
Feb 03 07:47:19 crc kubenswrapper[4708]: I0203 07:47:19.116239 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3bf673e8-0f8a-4bc8-8c9c-cb875b353142-catalog-content\") pod \"redhat-operators-76pxr\" (UID: \"3bf673e8-0f8a-4bc8-8c9c-cb875b353142\") " pod="openshift-marketplace/redhat-operators-76pxr"
Feb 03 07:47:19 crc kubenswrapper[4708]: I0203 07:47:19.160727 4708 generic.go:334] "Generic (PLEG): container finished" podID="503d00e6-ef42-48c0-8567-406b18044025" containerID="c135ba6f11bfe1492d8cd8989bd166cb25f4546aae2a2408a141ba42774a2af2" exitCode=0
Feb 03 07:47:19 crc kubenswrapper[4708]: I0203 07:47:19.160771 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l4v92" event={"ID":"503d00e6-ef42-48c0-8567-406b18044025","Type":"ContainerDied","Data":"c135ba6f11bfe1492d8cd8989bd166cb25f4546aae2a2408a141ba42774a2af2"}
Feb 03 07:47:19 crc kubenswrapper[4708]: I0203 07:47:19.160817 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l4v92" event={"ID":"503d00e6-ef42-48c0-8567-406b18044025","Type":"ContainerStarted","Data":"e092ad760434021e973f4e743cde060960763539c7ef67a5ffe13c80cac8ed02"}
Feb 03 07:47:19 crc kubenswrapper[4708]: I0203 07:47:19.162553 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sl7cw\" (UniqueName: \"kubernetes.io/projected/3bf673e8-0f8a-4bc8-8c9c-cb875b353142-kube-api-access-sl7cw\") pod \"redhat-operators-76pxr\" (UID: \"3bf673e8-0f8a-4bc8-8c9c-cb875b353142\") " pod="openshift-marketplace/redhat-operators-76pxr"
Feb 03 07:47:19 crc kubenswrapper[4708]: I0203 07:47:19.444872 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-76pxr"
Feb 03 07:47:19 crc kubenswrapper[4708]: I0203 07:47:19.823341 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-bj5kk"]
Feb 03 07:47:19 crc kubenswrapper[4708]: I0203 07:47:19.825943 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bj5kk"
Feb 03 07:47:19 crc kubenswrapper[4708]: I0203 07:47:19.843983 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bj5kk"]
Feb 03 07:47:19 crc kubenswrapper[4708]: I0203 07:47:19.932885 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56a1125e-b2b7-47e2-b94a-faf5c9bedcff-utilities\") pod \"certified-operators-bj5kk\" (UID: \"56a1125e-b2b7-47e2-b94a-faf5c9bedcff\") " pod="openshift-marketplace/certified-operators-bj5kk"
Feb 03 07:47:19 crc kubenswrapper[4708]: I0203 07:47:19.932936 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56a1125e-b2b7-47e2-b94a-faf5c9bedcff-catalog-content\") pod \"certified-operators-bj5kk\" (UID: \"56a1125e-b2b7-47e2-b94a-faf5c9bedcff\") " pod="openshift-marketplace/certified-operators-bj5kk"
Feb 03 07:47:19 crc kubenswrapper[4708]: I0203 07:47:19.932978 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzq96\" (UniqueName: \"kubernetes.io/projected/56a1125e-b2b7-47e2-b94a-faf5c9bedcff-kube-api-access-kzq96\") pod \"certified-operators-bj5kk\" (UID: \"56a1125e-b2b7-47e2-b94a-faf5c9bedcff\") " pod="openshift-marketplace/certified-operators-bj5kk"
Feb 03 07:47:19 crc kubenswrapper[4708]: W0203 07:47:19.968848 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3bf673e8_0f8a_4bc8_8c9c_cb875b353142.slice/crio-9d8df4ec0aee17859a134e8f3b36a5635184b93d5e6dee50ec230aa6b69d5f1e WatchSource:0}: Error finding container 9d8df4ec0aee17859a134e8f3b36a5635184b93d5e6dee50ec230aa6b69d5f1e: Status 404 returned error can't find the container with id 9d8df4ec0aee17859a134e8f3b36a5635184b93d5e6dee50ec230aa6b69d5f1e
Feb 03 07:47:19 crc kubenswrapper[4708]: I0203 07:47:19.972978 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-76pxr"]
Feb 03 07:47:20 crc kubenswrapper[4708]: I0203 07:47:20.034539 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56a1125e-b2b7-47e2-b94a-faf5c9bedcff-utilities\") pod \"certified-operators-bj5kk\" (UID: \"56a1125e-b2b7-47e2-b94a-faf5c9bedcff\") " pod="openshift-marketplace/certified-operators-bj5kk"
Feb 03 07:47:20 crc kubenswrapper[4708]: I0203 07:47:20.034578 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56a1125e-b2b7-47e2-b94a-faf5c9bedcff-catalog-content\") pod \"certified-operators-bj5kk\" (UID: \"56a1125e-b2b7-47e2-b94a-faf5c9bedcff\") " pod="openshift-marketplace/certified-operators-bj5kk"
Feb 03 07:47:20 crc kubenswrapper[4708]: I0203 07:47:20.034609 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzq96\" (UniqueName: \"kubernetes.io/projected/56a1125e-b2b7-47e2-b94a-faf5c9bedcff-kube-api-access-kzq96\") pod \"certified-operators-bj5kk\" (UID: \"56a1125e-b2b7-47e2-b94a-faf5c9bedcff\") " pod="openshift-marketplace/certified-operators-bj5kk"
Feb 03 07:47:20 crc kubenswrapper[4708]: I0203 07:47:20.035154 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56a1125e-b2b7-47e2-b94a-faf5c9bedcff-utilities\") pod \"certified-operators-bj5kk\" (UID: \"56a1125e-b2b7-47e2-b94a-faf5c9bedcff\") " pod="openshift-marketplace/certified-operators-bj5kk"
Feb 03 07:47:20 crc kubenswrapper[4708]: I0203 07:47:20.035266 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56a1125e-b2b7-47e2-b94a-faf5c9bedcff-catalog-content\") pod \"certified-operators-bj5kk\" (UID: \"56a1125e-b2b7-47e2-b94a-faf5c9bedcff\") " pod="openshift-marketplace/certified-operators-bj5kk"
Feb 03 07:47:20 crc kubenswrapper[4708]: I0203 07:47:20.060508 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzq96\" (UniqueName: \"kubernetes.io/projected/56a1125e-b2b7-47e2-b94a-faf5c9bedcff-kube-api-access-kzq96\") pod \"certified-operators-bj5kk\" (UID: \"56a1125e-b2b7-47e2-b94a-faf5c9bedcff\") " pod="openshift-marketplace/certified-operators-bj5kk"
Feb 03 07:47:20 crc kubenswrapper[4708]: I0203 07:47:20.154217 4708 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bj5kk"
Feb 03 07:47:20 crc kubenswrapper[4708]: I0203 07:47:20.187717 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-76pxr" event={"ID":"3bf673e8-0f8a-4bc8-8c9c-cb875b353142","Type":"ContainerStarted","Data":"9d8df4ec0aee17859a134e8f3b36a5635184b93d5e6dee50ec230aa6b69d5f1e"}
Feb 03 07:47:20 crc kubenswrapper[4708]: I0203 07:47:20.197762 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l4v92" event={"ID":"503d00e6-ef42-48c0-8567-406b18044025","Type":"ContainerStarted","Data":"9c79c7ba1d2f51645b2c63103067a7cf77685383906110a0ae9cb1854d1f77f5"}
Feb 03 07:47:20 crc kubenswrapper[4708]: I0203 07:47:20.775419 4708 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bj5kk"]
Feb 03 07:47:21 crc kubenswrapper[4708]: I0203 07:47:21.207045 4708 generic.go:334] "Generic (PLEG): container finished" podID="3bf673e8-0f8a-4bc8-8c9c-cb875b353142" containerID="83b26165079c045517ead4648c362e791e315a27090b343f46ae41ec2636de2a" exitCode=0
Feb 03 07:47:21 crc kubenswrapper[4708]: I0203 07:47:21.207101 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-76pxr" event={"ID":"3bf673e8-0f8a-4bc8-8c9c-cb875b353142","Type":"ContainerDied","Data":"83b26165079c045517ead4648c362e791e315a27090b343f46ae41ec2636de2a"}
Feb 03 07:47:21 crc kubenswrapper[4708]: I0203 07:47:21.211750 4708 generic.go:334] "Generic (PLEG): container finished" podID="56a1125e-b2b7-47e2-b94a-faf5c9bedcff" containerID="0a39b1cb5fbea341f6910453d93a949beaf18de9e273db78c34c9e7f40bf7d5f" exitCode=0
Feb 03 07:47:21 crc kubenswrapper[4708]: I0203 07:47:21.211846 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bj5kk" event={"ID":"56a1125e-b2b7-47e2-b94a-faf5c9bedcff","Type":"ContainerDied","Data":"0a39b1cb5fbea341f6910453d93a949beaf18de9e273db78c34c9e7f40bf7d5f"}
Feb 03 07:47:21 crc kubenswrapper[4708]: I0203 07:47:21.211877 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bj5kk" event={"ID":"56a1125e-b2b7-47e2-b94a-faf5c9bedcff","Type":"ContainerStarted","Data":"c39ae1dc005ed505053defbd300904530b92dd8c4ae3ec175772de560f8dc143"}
Feb 03 07:47:21 crc kubenswrapper[4708]: I0203 
07:47:21.216928 4708 generic.go:334] "Generic (PLEG): container finished" podID="503d00e6-ef42-48c0-8567-406b18044025" containerID="9c79c7ba1d2f51645b2c63103067a7cf77685383906110a0ae9cb1854d1f77f5" exitCode=0
Feb 03 07:47:21 crc kubenswrapper[4708]: I0203 07:47:21.216992 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l4v92" event={"ID":"503d00e6-ef42-48c0-8567-406b18044025","Type":"ContainerDied","Data":"9c79c7ba1d2f51645b2c63103067a7cf77685383906110a0ae9cb1854d1f77f5"}
Feb 03 07:47:22 crc kubenswrapper[4708]: I0203 07:47:22.227197 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l4v92" event={"ID":"503d00e6-ef42-48c0-8567-406b18044025","Type":"ContainerStarted","Data":"c79b50bf385c0667dc77b5f131ac546c213cbeaea11baddb08b15b1451d54dea"}
Feb 03 07:47:22 crc kubenswrapper[4708]: I0203 07:47:22.248494 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-l4v92" podStartSLOduration=2.787371882 podStartE2EDuration="5.248480356s" podCreationTimestamp="2026-02-03 07:47:17 +0000 UTC" firstStartedPulling="2026-02-03 07:47:19.162481622 +0000 UTC m=+2218.144428429" lastFinishedPulling="2026-02-03 07:47:21.623590106 +0000 UTC m=+2220.605536903" observedRunningTime="2026-02-03 07:47:22.247846321 +0000 UTC m=+2221.229793128" watchObservedRunningTime="2026-02-03 07:47:22.248480356 +0000 UTC m=+2221.230427163"
Feb 03 07:47:23 crc kubenswrapper[4708]: I0203 07:47:23.238527 4708 generic.go:334] "Generic (PLEG): container finished" podID="3bf673e8-0f8a-4bc8-8c9c-cb875b353142" containerID="d37f187383f04f1dc292982ce7dea41dbfd7ba8776f53ae08176b9baaaae6e41" exitCode=0
Feb 03 07:47:23 crc kubenswrapper[4708]: I0203 07:47:23.238570 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-76pxr" event={"ID":"3bf673e8-0f8a-4bc8-8c9c-cb875b353142","Type":"ContainerDied","Data":"d37f187383f04f1dc292982ce7dea41dbfd7ba8776f53ae08176b9baaaae6e41"}
Feb 03 07:47:23 crc kubenswrapper[4708]: I0203 07:47:23.242433 4708 generic.go:334] "Generic (PLEG): container finished" podID="56a1125e-b2b7-47e2-b94a-faf5c9bedcff" containerID="43f52ecae553c9cbf0d65568df606b8bb79feb53217eefc294031ffab329c007" exitCode=0
Feb 03 07:47:23 crc kubenswrapper[4708]: I0203 07:47:23.242495 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bj5kk" event={"ID":"56a1125e-b2b7-47e2-b94a-faf5c9bedcff","Type":"ContainerDied","Data":"43f52ecae553c9cbf0d65568df606b8bb79feb53217eefc294031ffab329c007"}
Feb 03 07:47:24 crc kubenswrapper[4708]: I0203 07:47:24.254860 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-76pxr" event={"ID":"3bf673e8-0f8a-4bc8-8c9c-cb875b353142","Type":"ContainerStarted","Data":"84756109a0314a9f2a255efef6ff0c3a2bc927da12ba4690b6530580fdabf098"}
Feb 03 07:47:24 crc kubenswrapper[4708]: I0203 07:47:24.258921 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bj5kk" event={"ID":"56a1125e-b2b7-47e2-b94a-faf5c9bedcff","Type":"ContainerStarted","Data":"e0c246a3e3f349b87a4269f09425c739058e22028602d006e9ec6314174649a6"}
Feb 03 07:47:24 crc kubenswrapper[4708]: I0203 07:47:24.260780 4708 generic.go:334] "Generic (PLEG): container finished" podID="8980e7be-a924-4608-84ba-425fed6b4a42" containerID="e882384096afc84bd7d35225929d342e6f35517f7227f998f3bf7c128c3b66e2" exitCode=0
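
The pod_startup_latency_tracker.go:104 entry above for redhat-marketplace-l4v92 reports two durations whose relationship can be checked directly from the logged values: podStartE2EDuration (5.248480356s) is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration (2.787371882) is that end-to-end time minus the image-pull window measured on the monotonic m=+ offsets. The same relation holds for the redhat-operators-76pxr and certified-operators-bj5kk entries below. A small Go sketch of the arithmetic, with the logged values as constants; it reproduces the numbers, it is not the tracker's code.

// slodur.go - reproduces the startup-duration arithmetic from the
// "Observed pod startup duration" entry above for redhat-marketplace-l4v92.
// The constants are the logged values; only the subtraction is ours.
package main

import "fmt"

func main() {
	const (
		e2eSeconds       = 5.248480356    // podStartE2EDuration: watchObservedRunningTime - podCreationTimestamp
		firstStartedPull = 2218.144428429 // m=+ offset of firstStartedPulling (monotonic clock)
		lastFinishedPull = 2220.605536903 // m=+ offset of lastFinishedPulling (monotonic clock)
	)
	pull := lastFinishedPull - firstStartedPull // image-pull window: 2.461108474s
	slo := e2eSeconds - pull                    // startup time excluding image pulls
	fmt.Printf("pull window:         %.9fs\n", pull)
	fmt.Printf("podStartSLOduration: %.9f\n", slo) // prints 2.787371882, matching the log
}
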
Feb 03 07:47:24 crc kubenswrapper[4708]: I0203 07:47:24.260863 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jqhrb/crc-debug-xb8wd" event={"ID":"8980e7be-a924-4608-84ba-425fed6b4a42","Type":"ContainerDied","Data":"e882384096afc84bd7d35225929d342e6f35517f7227f998f3bf7c128c3b66e2"}
Feb 03 07:47:24 crc kubenswrapper[4708]: I0203 07:47:24.279690 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-76pxr" podStartSLOduration=3.791249623 podStartE2EDuration="6.279664922s" podCreationTimestamp="2026-02-03 07:47:18 +0000 UTC" firstStartedPulling="2026-02-03 07:47:21.209038293 +0000 UTC m=+2220.190985090" lastFinishedPulling="2026-02-03 07:47:23.697453592 +0000 UTC m=+2222.679400389" observedRunningTime="2026-02-03 07:47:24.272297122 +0000 UTC m=+2223.254243919" watchObservedRunningTime="2026-02-03 07:47:24.279664922 +0000 UTC m=+2223.261611729"
Feb 03 07:47:24 crc kubenswrapper[4708]: I0203 07:47:24.302433 4708 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-bj5kk" podStartSLOduration=2.758970316 podStartE2EDuration="5.302406746s" podCreationTimestamp="2026-02-03 07:47:19 +0000 UTC" firstStartedPulling="2026-02-03 07:47:21.214776312 +0000 UTC m=+2220.196723119" lastFinishedPulling="2026-02-03 07:47:23.758212742 +0000 UTC m=+2222.740159549" observedRunningTime="2026-02-03 07:47:24.292493145 +0000 UTC m=+2223.274439972" watchObservedRunningTime="2026-02-03 07:47:24.302406746 +0000 UTC m=+2223.284353563"
Feb 03 07:47:25 crc kubenswrapper[4708]: I0203 07:47:25.390735 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jqhrb/crc-debug-xb8wd"
Feb 03 07:47:25 crc kubenswrapper[4708]: I0203 07:47:25.434307 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-jqhrb/crc-debug-xb8wd"]
Feb 03 07:47:25 crc kubenswrapper[4708]: I0203 07:47:25.443329 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-jqhrb/crc-debug-xb8wd"]
Feb 03 07:47:25 crc kubenswrapper[4708]: I0203 07:47:25.552671 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8980e7be-a924-4608-84ba-425fed6b4a42-host\") pod \"8980e7be-a924-4608-84ba-425fed6b4a42\" (UID: \"8980e7be-a924-4608-84ba-425fed6b4a42\") "
Feb 03 07:47:25 crc kubenswrapper[4708]: I0203 07:47:25.552829 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pmxbp\" (UniqueName: \"kubernetes.io/projected/8980e7be-a924-4608-84ba-425fed6b4a42-kube-api-access-pmxbp\") pod \"8980e7be-a924-4608-84ba-425fed6b4a42\" (UID: \"8980e7be-a924-4608-84ba-425fed6b4a42\") "
Feb 03 07:47:25 crc kubenswrapper[4708]: I0203 07:47:25.553084 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8980e7be-a924-4608-84ba-425fed6b4a42-host" (OuterVolumeSpecName: "host") pod "8980e7be-a924-4608-84ba-425fed6b4a42" (UID: "8980e7be-a924-4608-84ba-425fed6b4a42"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:47:25 crc kubenswrapper[4708]: I0203 07:47:25.553302 4708 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8980e7be-a924-4608-84ba-425fed6b4a42-host\") on node \"crc\" DevicePath \"\"" Feb 03 07:47:25 crc kubenswrapper[4708]: I0203 07:47:25.564928 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8980e7be-a924-4608-84ba-425fed6b4a42-kube-api-access-pmxbp" (OuterVolumeSpecName: "kube-api-access-pmxbp") pod "8980e7be-a924-4608-84ba-425fed6b4a42" (UID: "8980e7be-a924-4608-84ba-425fed6b4a42"). InnerVolumeSpecName "kube-api-access-pmxbp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:47:25 crc kubenswrapper[4708]: I0203 07:47:25.654646 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pmxbp\" (UniqueName: \"kubernetes.io/projected/8980e7be-a924-4608-84ba-425fed6b4a42-kube-api-access-pmxbp\") on node \"crc\" DevicePath \"\"" Feb 03 07:47:26 crc kubenswrapper[4708]: I0203 07:47:26.103052 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8980e7be-a924-4608-84ba-425fed6b4a42" path="/var/lib/kubelet/pods/8980e7be-a924-4608-84ba-425fed6b4a42/volumes" Feb 03 07:47:26 crc kubenswrapper[4708]: I0203 07:47:26.277215 4708 scope.go:117] "RemoveContainer" containerID="e882384096afc84bd7d35225929d342e6f35517f7227f998f3bf7c128c3b66e2" Feb 03 07:47:26 crc kubenswrapper[4708]: I0203 07:47:26.277280 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jqhrb/crc-debug-xb8wd" Feb 03 07:47:27 crc kubenswrapper[4708]: I0203 07:47:27.231846 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-jqhrb/crc-debug-bsqzw"] Feb 03 07:47:27 crc kubenswrapper[4708]: E0203 07:47:27.232197 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8980e7be-a924-4608-84ba-425fed6b4a42" containerName="container-00" Feb 03 07:47:27 crc kubenswrapper[4708]: I0203 07:47:27.232207 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="8980e7be-a924-4608-84ba-425fed6b4a42" containerName="container-00" Feb 03 07:47:27 crc kubenswrapper[4708]: I0203 07:47:27.232392 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="8980e7be-a924-4608-84ba-425fed6b4a42" containerName="container-00" Feb 03 07:47:27 crc kubenswrapper[4708]: I0203 07:47:27.232975 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jqhrb/crc-debug-bsqzw" Feb 03 07:47:27 crc kubenswrapper[4708]: I0203 07:47:27.234364 4708 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-jqhrb"/"default-dockercfg-zxjdm" Feb 03 07:47:27 crc kubenswrapper[4708]: I0203 07:47:27.388759 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2678830c-1cce-4441-8a9c-955652fad966-host\") pod \"crc-debug-bsqzw\" (UID: \"2678830c-1cce-4441-8a9c-955652fad966\") " pod="openshift-must-gather-jqhrb/crc-debug-bsqzw" Feb 03 07:47:27 crc kubenswrapper[4708]: I0203 07:47:27.388860 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2v76\" (UniqueName: \"kubernetes.io/projected/2678830c-1cce-4441-8a9c-955652fad966-kube-api-access-w2v76\") pod \"crc-debug-bsqzw\" (UID: \"2678830c-1cce-4441-8a9c-955652fad966\") " pod="openshift-must-gather-jqhrb/crc-debug-bsqzw" Feb 03 07:47:27 crc kubenswrapper[4708]: I0203 07:47:27.490994 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2678830c-1cce-4441-8a9c-955652fad966-host\") pod \"crc-debug-bsqzw\" (UID: \"2678830c-1cce-4441-8a9c-955652fad966\") " pod="openshift-must-gather-jqhrb/crc-debug-bsqzw" Feb 03 07:47:27 crc kubenswrapper[4708]: I0203 07:47:27.491272 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2v76\" (UniqueName: \"kubernetes.io/projected/2678830c-1cce-4441-8a9c-955652fad966-kube-api-access-w2v76\") pod \"crc-debug-bsqzw\" (UID: \"2678830c-1cce-4441-8a9c-955652fad966\") " pod="openshift-must-gather-jqhrb/crc-debug-bsqzw" Feb 03 07:47:27 crc kubenswrapper[4708]: I0203 07:47:27.491176 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2678830c-1cce-4441-8a9c-955652fad966-host\") pod \"crc-debug-bsqzw\" (UID: \"2678830c-1cce-4441-8a9c-955652fad966\") " pod="openshift-must-gather-jqhrb/crc-debug-bsqzw" Feb 03 07:47:27 crc kubenswrapper[4708]: I0203 07:47:27.514766 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2v76\" (UniqueName: \"kubernetes.io/projected/2678830c-1cce-4441-8a9c-955652fad966-kube-api-access-w2v76\") pod \"crc-debug-bsqzw\" (UID: \"2678830c-1cce-4441-8a9c-955652fad966\") " pod="openshift-must-gather-jqhrb/crc-debug-bsqzw" Feb 03 07:47:27 crc kubenswrapper[4708]: I0203 07:47:27.547902 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jqhrb/crc-debug-bsqzw" Feb 03 07:47:27 crc kubenswrapper[4708]: I0203 07:47:27.756856 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-l4v92" Feb 03 07:47:27 crc kubenswrapper[4708]: I0203 07:47:27.756926 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-l4v92" Feb 03 07:47:27 crc kubenswrapper[4708]: I0203 07:47:27.810378 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-l4v92" Feb 03 07:47:28 crc kubenswrapper[4708]: I0203 07:47:28.296205 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jqhrb/crc-debug-bsqzw" event={"ID":"2678830c-1cce-4441-8a9c-955652fad966","Type":"ContainerStarted","Data":"12ffdf852a81d14ff0f55c65335801935afcddbda2bf88ae808cfb8bce1d99c0"} Feb 03 07:47:28 crc kubenswrapper[4708]: I0203 07:47:28.354632 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-l4v92" Feb 03 07:47:29 crc kubenswrapper[4708]: I0203 07:47:29.309956 4708 generic.go:334] "Generic (PLEG): container finished" podID="2678830c-1cce-4441-8a9c-955652fad966" containerID="e6ba773b972253f8b816d002cf58a659ba85fafb74fab83d306cd74d8378a7b5" exitCode=0 Feb 03 07:47:29 crc kubenswrapper[4708]: I0203 07:47:29.310013 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jqhrb/crc-debug-bsqzw" event={"ID":"2678830c-1cce-4441-8a9c-955652fad966","Type":"ContainerDied","Data":"e6ba773b972253f8b816d002cf58a659ba85fafb74fab83d306cd74d8378a7b5"} Feb 03 07:47:29 crc kubenswrapper[4708]: I0203 07:47:29.444981 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-76pxr" Feb 03 07:47:29 crc kubenswrapper[4708]: I0203 07:47:29.445029 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-76pxr" Feb 03 07:47:29 crc kubenswrapper[4708]: I0203 07:47:29.609747 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-l4v92"] Feb 03 07:47:29 crc kubenswrapper[4708]: I0203 07:47:29.767482 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-jqhrb/crc-debug-bsqzw"] Feb 03 07:47:29 crc kubenswrapper[4708]: I0203 07:47:29.781244 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-jqhrb/crc-debug-bsqzw"] Feb 03 07:47:30 crc kubenswrapper[4708]: I0203 07:47:30.154874 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-bj5kk" Feb 03 07:47:30 crc kubenswrapper[4708]: I0203 07:47:30.154995 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-bj5kk" Feb 03 07:47:30 crc kubenswrapper[4708]: I0203 07:47:30.199544 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-bj5kk" Feb 03 07:47:30 crc kubenswrapper[4708]: I0203 07:47:30.319059 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-l4v92" podUID="503d00e6-ef42-48c0-8567-406b18044025" containerName="registry-server" containerID="cri-o://c79b50bf385c0667dc77b5f131ac546c213cbeaea11baddb08b15b1451d54dea" gracePeriod=2 Feb 03 
Feb 03 07:47:30 crc kubenswrapper[4708]: I0203 07:47:30.372637 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-bj5kk"
Feb 03 07:47:30 crc kubenswrapper[4708]: I0203 07:47:30.498076 4708 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-76pxr" podUID="3bf673e8-0f8a-4bc8-8c9c-cb875b353142" containerName="registry-server" probeResult="failure" output=<
Feb 03 07:47:30 crc kubenswrapper[4708]: timeout: failed to connect service ":50051" within 1s
Feb 03 07:47:30 crc kubenswrapper[4708]: >
Feb 03 07:47:30 crc kubenswrapper[4708]: I0203 07:47:30.550928 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jqhrb/crc-debug-bsqzw"
Feb 03 07:47:30 crc kubenswrapper[4708]: I0203 07:47:30.650985 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2678830c-1cce-4441-8a9c-955652fad966-host\") pod \"2678830c-1cce-4441-8a9c-955652fad966\" (UID: \"2678830c-1cce-4441-8a9c-955652fad966\") "
Feb 03 07:47:30 crc kubenswrapper[4708]: I0203 07:47:30.651077 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2678830c-1cce-4441-8a9c-955652fad966-host" (OuterVolumeSpecName: "host") pod "2678830c-1cce-4441-8a9c-955652fad966" (UID: "2678830c-1cce-4441-8a9c-955652fad966"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 03 07:47:30 crc kubenswrapper[4708]: I0203 07:47:30.651237 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w2v76\" (UniqueName: \"kubernetes.io/projected/2678830c-1cce-4441-8a9c-955652fad966-kube-api-access-w2v76\") pod \"2678830c-1cce-4441-8a9c-955652fad966\" (UID: \"2678830c-1cce-4441-8a9c-955652fad966\") "
Feb 03 07:47:30 crc kubenswrapper[4708]: I0203 07:47:30.651717 4708 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2678830c-1cce-4441-8a9c-955652fad966-host\") on node \"crc\" DevicePath \"\""
Feb 03 07:47:30 crc kubenswrapper[4708]: I0203 07:47:30.656384 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2678830c-1cce-4441-8a9c-955652fad966-kube-api-access-w2v76" (OuterVolumeSpecName: "kube-api-access-w2v76") pod "2678830c-1cce-4441-8a9c-955652fad966" (UID: "2678830c-1cce-4441-8a9c-955652fad966"). InnerVolumeSpecName "kube-api-access-w2v76". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 03 07:47:30 crc kubenswrapper[4708]: I0203 07:47:30.753310 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w2v76\" (UniqueName: \"kubernetes.io/projected/2678830c-1cce-4441-8a9c-955652fad966-kube-api-access-w2v76\") on node \"crc\" DevicePath \"\""
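
The prober.go:107 "Probe failed" entry above records a startup-probe failure for redhat-operators-76pxr: the registry-server's gRPC endpoint on :50051 did not answer within the 1s budget, so the pod stays unready (the later probe="startup" status="started" entry at 07:47:39 shows it eventually passing). A minimal reachability sketch with the same 1s budget; it uses a plain TCP dial and deliberately does not speak the gRPC health protocol the real probe uses, and the target localhost:50051 is an illustrative assumption.

// portcheck.go - is the registry endpoint accepting connections within 1s?
package main

import (
	"fmt"
	"net"
	"time"
)

func main() {
	conn, err := net.DialTimeout("tcp", "localhost:50051", time.Second)
	if err != nil {
		fmt.Println("probe failed:", err) // analogous to the unhealthy startup probe
		return
	}
	conn.Close()
	fmt.Println("probe ok: :50051 is accepting connections")
}
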
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l4v92" Feb 03 07:47:30 crc kubenswrapper[4708]: I0203 07:47:30.957232 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/503d00e6-ef42-48c0-8567-406b18044025-catalog-content\") pod \"503d00e6-ef42-48c0-8567-406b18044025\" (UID: \"503d00e6-ef42-48c0-8567-406b18044025\") " Feb 03 07:47:30 crc kubenswrapper[4708]: I0203 07:47:30.957471 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/503d00e6-ef42-48c0-8567-406b18044025-utilities\") pod \"503d00e6-ef42-48c0-8567-406b18044025\" (UID: \"503d00e6-ef42-48c0-8567-406b18044025\") " Feb 03 07:47:30 crc kubenswrapper[4708]: I0203 07:47:30.957500 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4x8r\" (UniqueName: \"kubernetes.io/projected/503d00e6-ef42-48c0-8567-406b18044025-kube-api-access-x4x8r\") pod \"503d00e6-ef42-48c0-8567-406b18044025\" (UID: \"503d00e6-ef42-48c0-8567-406b18044025\") " Feb 03 07:47:30 crc kubenswrapper[4708]: I0203 07:47:30.958143 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/503d00e6-ef42-48c0-8567-406b18044025-utilities" (OuterVolumeSpecName: "utilities") pod "503d00e6-ef42-48c0-8567-406b18044025" (UID: "503d00e6-ef42-48c0-8567-406b18044025"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:47:30 crc kubenswrapper[4708]: I0203 07:47:30.960943 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/503d00e6-ef42-48c0-8567-406b18044025-kube-api-access-x4x8r" (OuterVolumeSpecName: "kube-api-access-x4x8r") pod "503d00e6-ef42-48c0-8567-406b18044025" (UID: "503d00e6-ef42-48c0-8567-406b18044025"). InnerVolumeSpecName "kube-api-access-x4x8r". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:47:30 crc kubenswrapper[4708]: I0203 07:47:30.990097 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/503d00e6-ef42-48c0-8567-406b18044025-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "503d00e6-ef42-48c0-8567-406b18044025" (UID: "503d00e6-ef42-48c0-8567-406b18044025"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:47:30 crc kubenswrapper[4708]: I0203 07:47:30.990956 4708 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-jqhrb/crc-debug-7m9zt"] Feb 03 07:47:30 crc kubenswrapper[4708]: E0203 07:47:30.991340 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="503d00e6-ef42-48c0-8567-406b18044025" containerName="extract-utilities" Feb 03 07:47:30 crc kubenswrapper[4708]: I0203 07:47:30.991357 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="503d00e6-ef42-48c0-8567-406b18044025" containerName="extract-utilities" Feb 03 07:47:30 crc kubenswrapper[4708]: E0203 07:47:30.991382 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="503d00e6-ef42-48c0-8567-406b18044025" containerName="registry-server" Feb 03 07:47:30 crc kubenswrapper[4708]: I0203 07:47:30.991389 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="503d00e6-ef42-48c0-8567-406b18044025" containerName="registry-server" Feb 03 07:47:30 crc kubenswrapper[4708]: E0203 07:47:30.991413 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2678830c-1cce-4441-8a9c-955652fad966" containerName="container-00" Feb 03 07:47:30 crc kubenswrapper[4708]: I0203 07:47:30.991419 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="2678830c-1cce-4441-8a9c-955652fad966" containerName="container-00" Feb 03 07:47:30 crc kubenswrapper[4708]: E0203 07:47:30.991434 4708 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="503d00e6-ef42-48c0-8567-406b18044025" containerName="extract-content" Feb 03 07:47:30 crc kubenswrapper[4708]: I0203 07:47:30.991440 4708 state_mem.go:107] "Deleted CPUSet assignment" podUID="503d00e6-ef42-48c0-8567-406b18044025" containerName="extract-content" Feb 03 07:47:30 crc kubenswrapper[4708]: I0203 07:47:30.991598 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="2678830c-1cce-4441-8a9c-955652fad966" containerName="container-00" Feb 03 07:47:30 crc kubenswrapper[4708]: I0203 07:47:30.991612 4708 memory_manager.go:354] "RemoveStaleState removing state" podUID="503d00e6-ef42-48c0-8567-406b18044025" containerName="registry-server" Feb 03 07:47:30 crc kubenswrapper[4708]: I0203 07:47:30.992299 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jqhrb/crc-debug-7m9zt" Feb 03 07:47:31 crc kubenswrapper[4708]: I0203 07:47:31.060371 4708 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/503d00e6-ef42-48c0-8567-406b18044025-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:47:31 crc kubenswrapper[4708]: I0203 07:47:31.060638 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4x8r\" (UniqueName: \"kubernetes.io/projected/503d00e6-ef42-48c0-8567-406b18044025-kube-api-access-x4x8r\") on node \"crc\" DevicePath \"\"" Feb 03 07:47:31 crc kubenswrapper[4708]: I0203 07:47:31.060744 4708 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/503d00e6-ef42-48c0-8567-406b18044025-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:47:31 crc kubenswrapper[4708]: I0203 07:47:31.162038 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/08083c6a-d9a8-4f81-b444-84084b1be828-host\") pod \"crc-debug-7m9zt\" (UID: \"08083c6a-d9a8-4f81-b444-84084b1be828\") " pod="openshift-must-gather-jqhrb/crc-debug-7m9zt" Feb 03 07:47:31 crc kubenswrapper[4708]: I0203 07:47:31.162218 4708 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jc96n\" (UniqueName: \"kubernetes.io/projected/08083c6a-d9a8-4f81-b444-84084b1be828-kube-api-access-jc96n\") pod \"crc-debug-7m9zt\" (UID: \"08083c6a-d9a8-4f81-b444-84084b1be828\") " pod="openshift-must-gather-jqhrb/crc-debug-7m9zt" Feb 03 07:47:31 crc kubenswrapper[4708]: I0203 07:47:31.265181 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/08083c6a-d9a8-4f81-b444-84084b1be828-host\") pod \"crc-debug-7m9zt\" (UID: \"08083c6a-d9a8-4f81-b444-84084b1be828\") " pod="openshift-must-gather-jqhrb/crc-debug-7m9zt" Feb 03 07:47:31 crc kubenswrapper[4708]: I0203 07:47:31.265820 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/08083c6a-d9a8-4f81-b444-84084b1be828-host\") pod \"crc-debug-7m9zt\" (UID: \"08083c6a-d9a8-4f81-b444-84084b1be828\") " pod="openshift-must-gather-jqhrb/crc-debug-7m9zt" Feb 03 07:47:31 crc kubenswrapper[4708]: I0203 07:47:31.266453 4708 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jc96n\" (UniqueName: \"kubernetes.io/projected/08083c6a-d9a8-4f81-b444-84084b1be828-kube-api-access-jc96n\") pod \"crc-debug-7m9zt\" (UID: \"08083c6a-d9a8-4f81-b444-84084b1be828\") " pod="openshift-must-gather-jqhrb/crc-debug-7m9zt" Feb 03 07:47:31 crc kubenswrapper[4708]: I0203 07:47:31.284736 4708 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jc96n\" (UniqueName: \"kubernetes.io/projected/08083c6a-d9a8-4f81-b444-84084b1be828-kube-api-access-jc96n\") pod \"crc-debug-7m9zt\" (UID: \"08083c6a-d9a8-4f81-b444-84084b1be828\") " pod="openshift-must-gather-jqhrb/crc-debug-7m9zt" Feb 03 07:47:31 crc kubenswrapper[4708]: I0203 07:47:31.309483 4708 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jqhrb/crc-debug-7m9zt" Feb 03 07:47:31 crc kubenswrapper[4708]: I0203 07:47:31.335263 4708 scope.go:117] "RemoveContainer" containerID="e6ba773b972253f8b816d002cf58a659ba85fafb74fab83d306cd74d8378a7b5" Feb 03 07:47:31 crc kubenswrapper[4708]: I0203 07:47:31.335388 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jqhrb/crc-debug-bsqzw" Feb 03 07:47:31 crc kubenswrapper[4708]: I0203 07:47:31.358892 4708 generic.go:334] "Generic (PLEG): container finished" podID="503d00e6-ef42-48c0-8567-406b18044025" containerID="c79b50bf385c0667dc77b5f131ac546c213cbeaea11baddb08b15b1451d54dea" exitCode=0 Feb 03 07:47:31 crc kubenswrapper[4708]: I0203 07:47:31.358988 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l4v92" Feb 03 07:47:31 crc kubenswrapper[4708]: I0203 07:47:31.358982 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l4v92" event={"ID":"503d00e6-ef42-48c0-8567-406b18044025","Type":"ContainerDied","Data":"c79b50bf385c0667dc77b5f131ac546c213cbeaea11baddb08b15b1451d54dea"} Feb 03 07:47:31 crc kubenswrapper[4708]: I0203 07:47:31.359075 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l4v92" event={"ID":"503d00e6-ef42-48c0-8567-406b18044025","Type":"ContainerDied","Data":"e092ad760434021e973f4e743cde060960763539c7ef67a5ffe13c80cac8ed02"} Feb 03 07:47:31 crc kubenswrapper[4708]: W0203 07:47:31.364265 4708 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod08083c6a_d9a8_4f81_b444_84084b1be828.slice/crio-fadf227f098d9ffd6c8ee002b0ef0737c55d7656eb3563bfcfdacff8fb058e8c WatchSource:0}: Error finding container fadf227f098d9ffd6c8ee002b0ef0737c55d7656eb3563bfcfdacff8fb058e8c: Status 404 returned error can't find the container with id fadf227f098d9ffd6c8ee002b0ef0737c55d7656eb3563bfcfdacff8fb058e8c Feb 03 07:47:31 crc kubenswrapper[4708]: I0203 07:47:31.499184 4708 scope.go:117] "RemoveContainer" containerID="c79b50bf385c0667dc77b5f131ac546c213cbeaea11baddb08b15b1451d54dea" Feb 03 07:47:31 crc kubenswrapper[4708]: I0203 07:47:31.570342 4708 scope.go:117] "RemoveContainer" containerID="9c79c7ba1d2f51645b2c63103067a7cf77685383906110a0ae9cb1854d1f77f5" Feb 03 07:47:31 crc kubenswrapper[4708]: I0203 07:47:31.571124 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-l4v92"] Feb 03 07:47:31 crc kubenswrapper[4708]: I0203 07:47:31.581774 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-l4v92"] Feb 03 07:47:31 crc kubenswrapper[4708]: I0203 07:47:31.587161 4708 scope.go:117] "RemoveContainer" containerID="c135ba6f11bfe1492d8cd8989bd166cb25f4546aae2a2408a141ba42774a2af2" Feb 03 07:47:31 crc kubenswrapper[4708]: I0203 07:47:31.650553 4708 scope.go:117] "RemoveContainer" containerID="c79b50bf385c0667dc77b5f131ac546c213cbeaea11baddb08b15b1451d54dea" Feb 03 07:47:31 crc kubenswrapper[4708]: E0203 07:47:31.650971 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c79b50bf385c0667dc77b5f131ac546c213cbeaea11baddb08b15b1451d54dea\": container with ID starting with c79b50bf385c0667dc77b5f131ac546c213cbeaea11baddb08b15b1451d54dea not found: ID does not exist" 
containerID="c79b50bf385c0667dc77b5f131ac546c213cbeaea11baddb08b15b1451d54dea" Feb 03 07:47:31 crc kubenswrapper[4708]: I0203 07:47:31.651021 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c79b50bf385c0667dc77b5f131ac546c213cbeaea11baddb08b15b1451d54dea"} err="failed to get container status \"c79b50bf385c0667dc77b5f131ac546c213cbeaea11baddb08b15b1451d54dea\": rpc error: code = NotFound desc = could not find container \"c79b50bf385c0667dc77b5f131ac546c213cbeaea11baddb08b15b1451d54dea\": container with ID starting with c79b50bf385c0667dc77b5f131ac546c213cbeaea11baddb08b15b1451d54dea not found: ID does not exist" Feb 03 07:47:31 crc kubenswrapper[4708]: I0203 07:47:31.651055 4708 scope.go:117] "RemoveContainer" containerID="9c79c7ba1d2f51645b2c63103067a7cf77685383906110a0ae9cb1854d1f77f5" Feb 03 07:47:31 crc kubenswrapper[4708]: E0203 07:47:31.651386 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c79c7ba1d2f51645b2c63103067a7cf77685383906110a0ae9cb1854d1f77f5\": container with ID starting with 9c79c7ba1d2f51645b2c63103067a7cf77685383906110a0ae9cb1854d1f77f5 not found: ID does not exist" containerID="9c79c7ba1d2f51645b2c63103067a7cf77685383906110a0ae9cb1854d1f77f5" Feb 03 07:47:31 crc kubenswrapper[4708]: I0203 07:47:31.651412 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c79c7ba1d2f51645b2c63103067a7cf77685383906110a0ae9cb1854d1f77f5"} err="failed to get container status \"9c79c7ba1d2f51645b2c63103067a7cf77685383906110a0ae9cb1854d1f77f5\": rpc error: code = NotFound desc = could not find container \"9c79c7ba1d2f51645b2c63103067a7cf77685383906110a0ae9cb1854d1f77f5\": container with ID starting with 9c79c7ba1d2f51645b2c63103067a7cf77685383906110a0ae9cb1854d1f77f5 not found: ID does not exist" Feb 03 07:47:31 crc kubenswrapper[4708]: I0203 07:47:31.651433 4708 scope.go:117] "RemoveContainer" containerID="c135ba6f11bfe1492d8cd8989bd166cb25f4546aae2a2408a141ba42774a2af2" Feb 03 07:47:31 crc kubenswrapper[4708]: E0203 07:47:31.651762 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c135ba6f11bfe1492d8cd8989bd166cb25f4546aae2a2408a141ba42774a2af2\": container with ID starting with c135ba6f11bfe1492d8cd8989bd166cb25f4546aae2a2408a141ba42774a2af2 not found: ID does not exist" containerID="c135ba6f11bfe1492d8cd8989bd166cb25f4546aae2a2408a141ba42774a2af2" Feb 03 07:47:31 crc kubenswrapper[4708]: I0203 07:47:31.651808 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c135ba6f11bfe1492d8cd8989bd166cb25f4546aae2a2408a141ba42774a2af2"} err="failed to get container status \"c135ba6f11bfe1492d8cd8989bd166cb25f4546aae2a2408a141ba42774a2af2\": rpc error: code = NotFound desc = could not find container \"c135ba6f11bfe1492d8cd8989bd166cb25f4546aae2a2408a141ba42774a2af2\": container with ID starting with c135ba6f11bfe1492d8cd8989bd166cb25f4546aae2a2408a141ba42774a2af2 not found: ID does not exist" Feb 03 07:47:32 crc kubenswrapper[4708]: I0203 07:47:32.106209 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2678830c-1cce-4441-8a9c-955652fad966" path="/var/lib/kubelet/pods/2678830c-1cce-4441-8a9c-955652fad966/volumes" Feb 03 07:47:32 crc kubenswrapper[4708]: I0203 07:47:32.107234 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="503d00e6-ef42-48c0-8567-406b18044025" path="/var/lib/kubelet/pods/503d00e6-ef42-48c0-8567-406b18044025/volumes" Feb 03 07:47:32 crc kubenswrapper[4708]: I0203 07:47:32.376392 4708 generic.go:334] "Generic (PLEG): container finished" podID="08083c6a-d9a8-4f81-b444-84084b1be828" containerID="73a7c322225ada3060f5ed4edf8f913d19b6174bf55e71efc92ffbad02de01c8" exitCode=0 Feb 03 07:47:32 crc kubenswrapper[4708]: I0203 07:47:32.377315 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jqhrb/crc-debug-7m9zt" event={"ID":"08083c6a-d9a8-4f81-b444-84084b1be828","Type":"ContainerDied","Data":"73a7c322225ada3060f5ed4edf8f913d19b6174bf55e71efc92ffbad02de01c8"} Feb 03 07:47:32 crc kubenswrapper[4708]: I0203 07:47:32.377349 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jqhrb/crc-debug-7m9zt" event={"ID":"08083c6a-d9a8-4f81-b444-84084b1be828","Type":"ContainerStarted","Data":"fadf227f098d9ffd6c8ee002b0ef0737c55d7656eb3563bfcfdacff8fb058e8c"} Feb 03 07:47:32 crc kubenswrapper[4708]: I0203 07:47:32.413167 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-jqhrb/crc-debug-7m9zt"] Feb 03 07:47:32 crc kubenswrapper[4708]: I0203 07:47:32.420691 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-jqhrb/crc-debug-7m9zt"] Feb 03 07:47:32 crc kubenswrapper[4708]: I0203 07:47:32.609280 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bj5kk"] Feb 03 07:47:33 crc kubenswrapper[4708]: I0203 07:47:33.384326 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-bj5kk" podUID="56a1125e-b2b7-47e2-b94a-faf5c9bedcff" containerName="registry-server" containerID="cri-o://e0c246a3e3f349b87a4269f09425c739058e22028602d006e9ec6314174649a6" gracePeriod=2 Feb 03 07:47:33 crc kubenswrapper[4708]: I0203 07:47:33.636554 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jqhrb/crc-debug-7m9zt" Feb 03 07:47:33 crc kubenswrapper[4708]: I0203 07:47:33.735907 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/08083c6a-d9a8-4f81-b444-84084b1be828-host\") pod \"08083c6a-d9a8-4f81-b444-84084b1be828\" (UID: \"08083c6a-d9a8-4f81-b444-84084b1be828\") " Feb 03 07:47:33 crc kubenswrapper[4708]: I0203 07:47:33.736045 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/08083c6a-d9a8-4f81-b444-84084b1be828-host" (OuterVolumeSpecName: "host") pod "08083c6a-d9a8-4f81-b444-84084b1be828" (UID: "08083c6a-d9a8-4f81-b444-84084b1be828"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 03 07:47:33 crc kubenswrapper[4708]: I0203 07:47:33.736060 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jc96n\" (UniqueName: \"kubernetes.io/projected/08083c6a-d9a8-4f81-b444-84084b1be828-kube-api-access-jc96n\") pod \"08083c6a-d9a8-4f81-b444-84084b1be828\" (UID: \"08083c6a-d9a8-4f81-b444-84084b1be828\") " Feb 03 07:47:33 crc kubenswrapper[4708]: I0203 07:47:33.737014 4708 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/08083c6a-d9a8-4f81-b444-84084b1be828-host\") on node \"crc\" DevicePath \"\"" Feb 03 07:47:33 crc kubenswrapper[4708]: I0203 07:47:33.742071 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08083c6a-d9a8-4f81-b444-84084b1be828-kube-api-access-jc96n" (OuterVolumeSpecName: "kube-api-access-jc96n") pod "08083c6a-d9a8-4f81-b444-84084b1be828" (UID: "08083c6a-d9a8-4f81-b444-84084b1be828"). InnerVolumeSpecName "kube-api-access-jc96n". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:47:33 crc kubenswrapper[4708]: I0203 07:47:33.839266 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jc96n\" (UniqueName: \"kubernetes.io/projected/08083c6a-d9a8-4f81-b444-84084b1be828-kube-api-access-jc96n\") on node \"crc\" DevicePath \"\"" Feb 03 07:47:33 crc kubenswrapper[4708]: I0203 07:47:33.870683 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bj5kk" Feb 03 07:47:34 crc kubenswrapper[4708]: I0203 07:47:34.050306 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56a1125e-b2b7-47e2-b94a-faf5c9bedcff-utilities\") pod \"56a1125e-b2b7-47e2-b94a-faf5c9bedcff\" (UID: \"56a1125e-b2b7-47e2-b94a-faf5c9bedcff\") " Feb 03 07:47:34 crc kubenswrapper[4708]: I0203 07:47:34.050760 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kzq96\" (UniqueName: \"kubernetes.io/projected/56a1125e-b2b7-47e2-b94a-faf5c9bedcff-kube-api-access-kzq96\") pod \"56a1125e-b2b7-47e2-b94a-faf5c9bedcff\" (UID: \"56a1125e-b2b7-47e2-b94a-faf5c9bedcff\") " Feb 03 07:47:34 crc kubenswrapper[4708]: I0203 07:47:34.051370 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56a1125e-b2b7-47e2-b94a-faf5c9bedcff-catalog-content\") pod \"56a1125e-b2b7-47e2-b94a-faf5c9bedcff\" (UID: \"56a1125e-b2b7-47e2-b94a-faf5c9bedcff\") " Feb 03 07:47:34 crc kubenswrapper[4708]: I0203 07:47:34.051941 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56a1125e-b2b7-47e2-b94a-faf5c9bedcff-utilities" (OuterVolumeSpecName: "utilities") pod "56a1125e-b2b7-47e2-b94a-faf5c9bedcff" (UID: "56a1125e-b2b7-47e2-b94a-faf5c9bedcff"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:47:34 crc kubenswrapper[4708]: I0203 07:47:34.052684 4708 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56a1125e-b2b7-47e2-b94a-faf5c9bedcff-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:47:34 crc kubenswrapper[4708]: I0203 07:47:34.067177 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56a1125e-b2b7-47e2-b94a-faf5c9bedcff-kube-api-access-kzq96" (OuterVolumeSpecName: "kube-api-access-kzq96") pod "56a1125e-b2b7-47e2-b94a-faf5c9bedcff" (UID: "56a1125e-b2b7-47e2-b94a-faf5c9bedcff"). InnerVolumeSpecName "kube-api-access-kzq96". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:47:34 crc kubenswrapper[4708]: I0203 07:47:34.114734 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08083c6a-d9a8-4f81-b444-84084b1be828" path="/var/lib/kubelet/pods/08083c6a-d9a8-4f81-b444-84084b1be828/volumes" Feb 03 07:47:34 crc kubenswrapper[4708]: I0203 07:47:34.115688 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56a1125e-b2b7-47e2-b94a-faf5c9bedcff-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "56a1125e-b2b7-47e2-b94a-faf5c9bedcff" (UID: "56a1125e-b2b7-47e2-b94a-faf5c9bedcff"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:47:34 crc kubenswrapper[4708]: I0203 07:47:34.156649 4708 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56a1125e-b2b7-47e2-b94a-faf5c9bedcff-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:47:34 crc kubenswrapper[4708]: I0203 07:47:34.156693 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kzq96\" (UniqueName: \"kubernetes.io/projected/56a1125e-b2b7-47e2-b94a-faf5c9bedcff-kube-api-access-kzq96\") on node \"crc\" DevicePath \"\"" Feb 03 07:47:34 crc kubenswrapper[4708]: I0203 07:47:34.425562 4708 generic.go:334] "Generic (PLEG): container finished" podID="56a1125e-b2b7-47e2-b94a-faf5c9bedcff" containerID="e0c246a3e3f349b87a4269f09425c739058e22028602d006e9ec6314174649a6" exitCode=0 Feb 03 07:47:34 crc kubenswrapper[4708]: I0203 07:47:34.425705 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bj5kk" event={"ID":"56a1125e-b2b7-47e2-b94a-faf5c9bedcff","Type":"ContainerDied","Data":"e0c246a3e3f349b87a4269f09425c739058e22028602d006e9ec6314174649a6"} Feb 03 07:47:34 crc kubenswrapper[4708]: I0203 07:47:34.425783 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bj5kk" event={"ID":"56a1125e-b2b7-47e2-b94a-faf5c9bedcff","Type":"ContainerDied","Data":"c39ae1dc005ed505053defbd300904530b92dd8c4ae3ec175772de560f8dc143"} Feb 03 07:47:34 crc kubenswrapper[4708]: I0203 07:47:34.425827 4708 scope.go:117] "RemoveContainer" containerID="e0c246a3e3f349b87a4269f09425c739058e22028602d006e9ec6314174649a6" Feb 03 07:47:34 crc kubenswrapper[4708]: I0203 07:47:34.425854 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bj5kk" Feb 03 07:47:34 crc kubenswrapper[4708]: I0203 07:47:34.434872 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jqhrb/crc-debug-7m9zt" Feb 03 07:47:34 crc kubenswrapper[4708]: I0203 07:47:34.480878 4708 scope.go:117] "RemoveContainer" containerID="43f52ecae553c9cbf0d65568df606b8bb79feb53217eefc294031ffab329c007" Feb 03 07:47:34 crc kubenswrapper[4708]: I0203 07:47:34.482449 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bj5kk"] Feb 03 07:47:34 crc kubenswrapper[4708]: I0203 07:47:34.490285 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-bj5kk"] Feb 03 07:47:34 crc kubenswrapper[4708]: I0203 07:47:34.507843 4708 scope.go:117] "RemoveContainer" containerID="0a39b1cb5fbea341f6910453d93a949beaf18de9e273db78c34c9e7f40bf7d5f" Feb 03 07:47:34 crc kubenswrapper[4708]: I0203 07:47:34.560625 4708 scope.go:117] "RemoveContainer" containerID="e0c246a3e3f349b87a4269f09425c739058e22028602d006e9ec6314174649a6" Feb 03 07:47:34 crc kubenswrapper[4708]: E0203 07:47:34.561198 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0c246a3e3f349b87a4269f09425c739058e22028602d006e9ec6314174649a6\": container with ID starting with e0c246a3e3f349b87a4269f09425c739058e22028602d006e9ec6314174649a6 not found: ID does not exist" containerID="e0c246a3e3f349b87a4269f09425c739058e22028602d006e9ec6314174649a6" Feb 03 07:47:34 crc kubenswrapper[4708]: I0203 07:47:34.561256 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0c246a3e3f349b87a4269f09425c739058e22028602d006e9ec6314174649a6"} err="failed to get container status \"e0c246a3e3f349b87a4269f09425c739058e22028602d006e9ec6314174649a6\": rpc error: code = NotFound desc = could not find container \"e0c246a3e3f349b87a4269f09425c739058e22028602d006e9ec6314174649a6\": container with ID starting with e0c246a3e3f349b87a4269f09425c739058e22028602d006e9ec6314174649a6 not found: ID does not exist" Feb 03 07:47:34 crc kubenswrapper[4708]: I0203 07:47:34.561294 4708 scope.go:117] "RemoveContainer" containerID="43f52ecae553c9cbf0d65568df606b8bb79feb53217eefc294031ffab329c007" Feb 03 07:47:34 crc kubenswrapper[4708]: E0203 07:47:34.562207 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43f52ecae553c9cbf0d65568df606b8bb79feb53217eefc294031ffab329c007\": container with ID starting with 43f52ecae553c9cbf0d65568df606b8bb79feb53217eefc294031ffab329c007 not found: ID does not exist" containerID="43f52ecae553c9cbf0d65568df606b8bb79feb53217eefc294031ffab329c007" Feb 03 07:47:34 crc kubenswrapper[4708]: I0203 07:47:34.562239 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43f52ecae553c9cbf0d65568df606b8bb79feb53217eefc294031ffab329c007"} err="failed to get container status \"43f52ecae553c9cbf0d65568df606b8bb79feb53217eefc294031ffab329c007\": rpc error: code = NotFound desc = could not find container \"43f52ecae553c9cbf0d65568df606b8bb79feb53217eefc294031ffab329c007\": container with ID starting with 43f52ecae553c9cbf0d65568df606b8bb79feb53217eefc294031ffab329c007 not found: ID does not exist" Feb 03 07:47:34 crc kubenswrapper[4708]: I0203 07:47:34.562263 4708 scope.go:117] "RemoveContainer" containerID="0a39b1cb5fbea341f6910453d93a949beaf18de9e273db78c34c9e7f40bf7d5f" Feb 03 07:47:34 crc kubenswrapper[4708]: E0203 07:47:34.562635 4708 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"0a39b1cb5fbea341f6910453d93a949beaf18de9e273db78c34c9e7f40bf7d5f\": container with ID starting with 0a39b1cb5fbea341f6910453d93a949beaf18de9e273db78c34c9e7f40bf7d5f not found: ID does not exist" containerID="0a39b1cb5fbea341f6910453d93a949beaf18de9e273db78c34c9e7f40bf7d5f" Feb 03 07:47:34 crc kubenswrapper[4708]: I0203 07:47:34.562664 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a39b1cb5fbea341f6910453d93a949beaf18de9e273db78c34c9e7f40bf7d5f"} err="failed to get container status \"0a39b1cb5fbea341f6910453d93a949beaf18de9e273db78c34c9e7f40bf7d5f\": rpc error: code = NotFound desc = could not find container \"0a39b1cb5fbea341f6910453d93a949beaf18de9e273db78c34c9e7f40bf7d5f\": container with ID starting with 0a39b1cb5fbea341f6910453d93a949beaf18de9e273db78c34c9e7f40bf7d5f not found: ID does not exist" Feb 03 07:47:34 crc kubenswrapper[4708]: I0203 07:47:34.562680 4708 scope.go:117] "RemoveContainer" containerID="73a7c322225ada3060f5ed4edf8f913d19b6174bf55e71efc92ffbad02de01c8" Feb 03 07:47:36 crc kubenswrapper[4708]: I0203 07:47:36.103667 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56a1125e-b2b7-47e2-b94a-faf5c9bedcff" path="/var/lib/kubelet/pods/56a1125e-b2b7-47e2-b94a-faf5c9bedcff/volumes" Feb 03 07:47:39 crc kubenswrapper[4708]: I0203 07:47:39.492613 4708 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-76pxr" Feb 03 07:47:39 crc kubenswrapper[4708]: I0203 07:47:39.548825 4708 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-76pxr" Feb 03 07:47:39 crc kubenswrapper[4708]: I0203 07:47:39.732224 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-76pxr"] Feb 03 07:47:41 crc kubenswrapper[4708]: I0203 07:47:41.499609 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-76pxr" podUID="3bf673e8-0f8a-4bc8-8c9c-cb875b353142" containerName="registry-server" containerID="cri-o://84756109a0314a9f2a255efef6ff0c3a2bc927da12ba4690b6530580fdabf098" gracePeriod=2 Feb 03 07:47:41 crc kubenswrapper[4708]: I0203 07:47:41.978543 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-76pxr" Feb 03 07:47:42 crc kubenswrapper[4708]: I0203 07:47:42.029436 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sl7cw\" (UniqueName: \"kubernetes.io/projected/3bf673e8-0f8a-4bc8-8c9c-cb875b353142-kube-api-access-sl7cw\") pod \"3bf673e8-0f8a-4bc8-8c9c-cb875b353142\" (UID: \"3bf673e8-0f8a-4bc8-8c9c-cb875b353142\") " Feb 03 07:47:42 crc kubenswrapper[4708]: I0203 07:47:42.029647 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3bf673e8-0f8a-4bc8-8c9c-cb875b353142-utilities\") pod \"3bf673e8-0f8a-4bc8-8c9c-cb875b353142\" (UID: \"3bf673e8-0f8a-4bc8-8c9c-cb875b353142\") " Feb 03 07:47:42 crc kubenswrapper[4708]: I0203 07:47:42.029704 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3bf673e8-0f8a-4bc8-8c9c-cb875b353142-catalog-content\") pod \"3bf673e8-0f8a-4bc8-8c9c-cb875b353142\" (UID: \"3bf673e8-0f8a-4bc8-8c9c-cb875b353142\") " Feb 03 07:47:42 crc kubenswrapper[4708]: I0203 07:47:42.038672 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3bf673e8-0f8a-4bc8-8c9c-cb875b353142-kube-api-access-sl7cw" (OuterVolumeSpecName: "kube-api-access-sl7cw") pod "3bf673e8-0f8a-4bc8-8c9c-cb875b353142" (UID: "3bf673e8-0f8a-4bc8-8c9c-cb875b353142"). InnerVolumeSpecName "kube-api-access-sl7cw". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:47:42 crc kubenswrapper[4708]: I0203 07:47:42.040072 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3bf673e8-0f8a-4bc8-8c9c-cb875b353142-utilities" (OuterVolumeSpecName: "utilities") pod "3bf673e8-0f8a-4bc8-8c9c-cb875b353142" (UID: "3bf673e8-0f8a-4bc8-8c9c-cb875b353142"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:47:42 crc kubenswrapper[4708]: I0203 07:47:42.131998 4708 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3bf673e8-0f8a-4bc8-8c9c-cb875b353142-utilities\") on node \"crc\" DevicePath \"\"" Feb 03 07:47:42 crc kubenswrapper[4708]: I0203 07:47:42.132037 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sl7cw\" (UniqueName: \"kubernetes.io/projected/3bf673e8-0f8a-4bc8-8c9c-cb875b353142-kube-api-access-sl7cw\") on node \"crc\" DevicePath \"\"" Feb 03 07:47:42 crc kubenswrapper[4708]: I0203 07:47:42.160309 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3bf673e8-0f8a-4bc8-8c9c-cb875b353142-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3bf673e8-0f8a-4bc8-8c9c-cb875b353142" (UID: "3bf673e8-0f8a-4bc8-8c9c-cb875b353142"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:47:42 crc kubenswrapper[4708]: I0203 07:47:42.234532 4708 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3bf673e8-0f8a-4bc8-8c9c-cb875b353142-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 03 07:47:42 crc kubenswrapper[4708]: I0203 07:47:42.510291 4708 generic.go:334] "Generic (PLEG): container finished" podID="3bf673e8-0f8a-4bc8-8c9c-cb875b353142" containerID="84756109a0314a9f2a255efef6ff0c3a2bc927da12ba4690b6530580fdabf098" exitCode=0 Feb 03 07:47:42 crc kubenswrapper[4708]: I0203 07:47:42.510340 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-76pxr" event={"ID":"3bf673e8-0f8a-4bc8-8c9c-cb875b353142","Type":"ContainerDied","Data":"84756109a0314a9f2a255efef6ff0c3a2bc927da12ba4690b6530580fdabf098"} Feb 03 07:47:42 crc kubenswrapper[4708]: I0203 07:47:42.510664 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-76pxr" event={"ID":"3bf673e8-0f8a-4bc8-8c9c-cb875b353142","Type":"ContainerDied","Data":"9d8df4ec0aee17859a134e8f3b36a5635184b93d5e6dee50ec230aa6b69d5f1e"} Feb 03 07:47:42 crc kubenswrapper[4708]: I0203 07:47:42.510368 4708 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-76pxr" Feb 03 07:47:42 crc kubenswrapper[4708]: I0203 07:47:42.510691 4708 scope.go:117] "RemoveContainer" containerID="84756109a0314a9f2a255efef6ff0c3a2bc927da12ba4690b6530580fdabf098" Feb 03 07:47:42 crc kubenswrapper[4708]: I0203 07:47:42.536237 4708 scope.go:117] "RemoveContainer" containerID="d37f187383f04f1dc292982ce7dea41dbfd7ba8776f53ae08176b9baaaae6e41" Feb 03 07:47:42 crc kubenswrapper[4708]: I0203 07:47:42.557701 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-76pxr"] Feb 03 07:47:42 crc kubenswrapper[4708]: I0203 07:47:42.561240 4708 scope.go:117] "RemoveContainer" containerID="83b26165079c045517ead4648c362e791e315a27090b343f46ae41ec2636de2a" Feb 03 07:47:42 crc kubenswrapper[4708]: I0203 07:47:42.565015 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-76pxr"] Feb 03 07:47:42 crc kubenswrapper[4708]: I0203 07:47:42.612100 4708 scope.go:117] "RemoveContainer" containerID="84756109a0314a9f2a255efef6ff0c3a2bc927da12ba4690b6530580fdabf098" Feb 03 07:47:42 crc kubenswrapper[4708]: E0203 07:47:42.612572 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84756109a0314a9f2a255efef6ff0c3a2bc927da12ba4690b6530580fdabf098\": container with ID starting with 84756109a0314a9f2a255efef6ff0c3a2bc927da12ba4690b6530580fdabf098 not found: ID does not exist" containerID="84756109a0314a9f2a255efef6ff0c3a2bc927da12ba4690b6530580fdabf098" Feb 03 07:47:42 crc kubenswrapper[4708]: I0203 07:47:42.612677 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84756109a0314a9f2a255efef6ff0c3a2bc927da12ba4690b6530580fdabf098"} err="failed to get container status \"84756109a0314a9f2a255efef6ff0c3a2bc927da12ba4690b6530580fdabf098\": rpc error: code = NotFound desc = could not find container \"84756109a0314a9f2a255efef6ff0c3a2bc927da12ba4690b6530580fdabf098\": container with ID starting with 84756109a0314a9f2a255efef6ff0c3a2bc927da12ba4690b6530580fdabf098 not found: ID does not exist" Feb 03 07:47:42 crc 
Feb 03 07:47:42 crc kubenswrapper[4708]: I0203 07:47:42.612754 4708 scope.go:117] "RemoveContainer" containerID="d37f187383f04f1dc292982ce7dea41dbfd7ba8776f53ae08176b9baaaae6e41"
Feb 03 07:47:42 crc kubenswrapper[4708]: E0203 07:47:42.613265 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d37f187383f04f1dc292982ce7dea41dbfd7ba8776f53ae08176b9baaaae6e41\": container with ID starting with d37f187383f04f1dc292982ce7dea41dbfd7ba8776f53ae08176b9baaaae6e41 not found: ID does not exist" containerID="d37f187383f04f1dc292982ce7dea41dbfd7ba8776f53ae08176b9baaaae6e41"
Feb 03 07:47:42 crc kubenswrapper[4708]: I0203 07:47:42.613347 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d37f187383f04f1dc292982ce7dea41dbfd7ba8776f53ae08176b9baaaae6e41"} err="failed to get container status \"d37f187383f04f1dc292982ce7dea41dbfd7ba8776f53ae08176b9baaaae6e41\": rpc error: code = NotFound desc = could not find container \"d37f187383f04f1dc292982ce7dea41dbfd7ba8776f53ae08176b9baaaae6e41\": container with ID starting with d37f187383f04f1dc292982ce7dea41dbfd7ba8776f53ae08176b9baaaae6e41 not found: ID does not exist"
Feb 03 07:47:42 crc kubenswrapper[4708]: I0203 07:47:42.613410 4708 scope.go:117] "RemoveContainer" containerID="83b26165079c045517ead4648c362e791e315a27090b343f46ae41ec2636de2a"
Feb 03 07:47:42 crc kubenswrapper[4708]: E0203 07:47:42.613684 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"83b26165079c045517ead4648c362e791e315a27090b343f46ae41ec2636de2a\": container with ID starting with 83b26165079c045517ead4648c362e791e315a27090b343f46ae41ec2636de2a not found: ID does not exist" containerID="83b26165079c045517ead4648c362e791e315a27090b343f46ae41ec2636de2a"
Feb 03 07:47:42 crc kubenswrapper[4708]: I0203 07:47:42.613758 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83b26165079c045517ead4648c362e791e315a27090b343f46ae41ec2636de2a"} err="failed to get container status \"83b26165079c045517ead4648c362e791e315a27090b343f46ae41ec2636de2a\": rpc error: code = NotFound desc = could not find container \"83b26165079c045517ead4648c362e791e315a27090b343f46ae41ec2636de2a\": container with ID starting with 83b26165079c045517ead4648c362e791e315a27090b343f46ae41ec2636de2a not found: ID does not exist"
Feb 03 07:47:44 crc kubenswrapper[4708]: I0203 07:47:44.123213 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3bf673e8-0f8a-4bc8-8c9c-cb875b353142" path="/var/lib/kubelet/pods/3bf673e8-0f8a-4bc8-8c9c-cb875b353142/volumes"
Feb 03 07:48:00 crc kubenswrapper[4708]: I0203 07:48:00.158393 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6f676fd47d-s9mvl_7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e/barbican-api/0.log"
Feb 03 07:48:00 crc kubenswrapper[4708]: I0203 07:48:00.286919 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6f676fd47d-s9mvl_7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e/barbican-api-log/0.log"
Feb 03 07:48:00 crc kubenswrapper[4708]: I0203 07:48:00.334566 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-db7889686-b8tst_4e40bc4a-7f69-410e-b310-4cb12a8a7f58/barbican-keystone-listener/0.log"
Feb 03 07:48:00 crc kubenswrapper[4708]: I0203 07:48:00.445121 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-db7889686-b8tst_4e40bc4a-7f69-410e-b310-4cb12a8a7f58/barbican-keystone-listener-log/0.log"
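
The long run of log.go:25 "Finished parsing log file" entries that begins here walks the standard kubelet pod-log layout, /var/log/pods/<namespace>_<pod-name>_<pod-uid>/<container>/<restart-count>.log; the /1.log and /2.log paths later in the run belong to containers that have restarted. A small parser for that layout, assuming well-formed paths (namespaces and pod names are DNS-1123 names, so "_" only occurs as the separator between the three directory components):

```go
package main

import (
	"fmt"
	"path"
	"strings"
)

// PodLogRef captures the pieces encoded in a kubelet pod log path:
// /var/log/pods/<namespace>_<pod-name>_<pod-uid>/<container>/<restart>.log
type PodLogRef struct {
	Namespace, Pod, UID, Container, Restart string
}

func parsePodLogPath(p string) (PodLogRef, error) {
	rel := strings.TrimPrefix(p, "/var/log/pods/")
	parts := strings.Split(rel, "/")
	if len(parts) != 3 {
		return PodLogRef{}, fmt.Errorf("unexpected layout: %s", p)
	}
	// "_" cannot appear inside a namespace or pod name, so a 3-way
	// split cleanly separates namespace, pod name, and pod UID.
	dir := strings.SplitN(parts[0], "_", 3)
	if len(dir) != 3 {
		return PodLogRef{}, fmt.Errorf("unexpected pod dir: %s", parts[0])
	}
	return PodLogRef{
		Namespace: dir[0],
		Pod:       dir[1],
		UID:       dir[2],
		Container: parts[1],
		Restart:   strings.TrimSuffix(path.Base(parts[2]), ".log"),
	}, nil
}

func main() {
	ref, err := parsePodLogPath("/var/log/pods/openstack_barbican-api-6f676fd47d-s9mvl_7e8eb0a1-9803-4750-9f2e-5cdb5b86a41e/barbican-api/0.log")
	if err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", ref) // Restart "0" = first instance of the container
}
```
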
path="/var/log/pods/openstack_barbican-keystone-listener-db7889686-b8tst_4e40bc4a-7f69-410e-b310-4cb12a8a7f58/barbican-keystone-listener-log/0.log" Feb 03 07:48:00 crc kubenswrapper[4708]: I0203 07:48:00.547899 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6cdbf88dd5-z8pqs_00f8a942-b096-49d1-b020-c1aa13eb42c4/barbican-worker-log/0.log" Feb 03 07:48:00 crc kubenswrapper[4708]: I0203 07:48:00.581385 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6cdbf88dd5-z8pqs_00f8a942-b096-49d1-b020-c1aa13eb42c4/barbican-worker/0.log" Feb 03 07:48:00 crc kubenswrapper[4708]: I0203 07:48:00.693366 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_11c88b16-1207-482e-af23-035b4b973d3b/ceilometer-central-agent/0.log" Feb 03 07:48:00 crc kubenswrapper[4708]: I0203 07:48:00.744064 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_11c88b16-1207-482e-af23-035b4b973d3b/ceilometer-notification-agent/0.log" Feb 03 07:48:00 crc kubenswrapper[4708]: I0203 07:48:00.782563 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_11c88b16-1207-482e-af23-035b4b973d3b/proxy-httpd/0.log" Feb 03 07:48:00 crc kubenswrapper[4708]: I0203 07:48:00.817157 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_11c88b16-1207-482e-af23-035b4b973d3b/sg-core/0.log" Feb 03 07:48:00 crc kubenswrapper[4708]: I0203 07:48:00.964203 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_e8626bc3-c20f-47d2-b183-9d27e9ec814c/cinder-api/0.log" Feb 03 07:48:00 crc kubenswrapper[4708]: I0203 07:48:00.985736 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_e8626bc3-c20f-47d2-b183-9d27e9ec814c/cinder-api-log/0.log" Feb 03 07:48:01 crc kubenswrapper[4708]: I0203 07:48:01.157479 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_55a84e3b-6f9a-44d0-b059-2a4c842810dc/probe/0.log" Feb 03 07:48:01 crc kubenswrapper[4708]: I0203 07:48:01.215331 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_55a84e3b-6f9a-44d0-b059-2a4c842810dc/cinder-scheduler/0.log" Feb 03 07:48:01 crc kubenswrapper[4708]: I0203 07:48:01.258769 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-89c5cd4d5-86mmh_b0112ed3-3c81-4e21-ae47-89c473987dec/init/0.log" Feb 03 07:48:01 crc kubenswrapper[4708]: I0203 07:48:01.457381 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-89c5cd4d5-86mmh_b0112ed3-3c81-4e21-ae47-89c473987dec/init/0.log" Feb 03 07:48:01 crc kubenswrapper[4708]: I0203 07:48:01.484569 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-89c5cd4d5-86mmh_b0112ed3-3c81-4e21-ae47-89c473987dec/dnsmasq-dns/0.log" Feb 03 07:48:01 crc kubenswrapper[4708]: I0203 07:48:01.516910 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_a1cfcb85-5e57-43d2-8255-4be0c18d60f0/glance-httpd/0.log" Feb 03 07:48:01 crc kubenswrapper[4708]: I0203 07:48:01.678983 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_a1cfcb85-5e57-43d2-8255-4be0c18d60f0/glance-log/0.log" Feb 03 07:48:01 crc kubenswrapper[4708]: I0203 07:48:01.729418 4708 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_glance-default-internal-api-0_05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6/glance-log/0.log" Feb 03 07:48:01 crc kubenswrapper[4708]: I0203 07:48:01.740844 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_05cbc1a7-e1e9-4651-b6fe-5f4c98bb88e6/glance-httpd/0.log" Feb 03 07:48:01 crc kubenswrapper[4708]: I0203 07:48:01.911071 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-5fffdc6c76-m5s5d_e7212cfb-233f-4a09-ae76-fcfe61a4ed14/init/0.log" Feb 03 07:48:02 crc kubenswrapper[4708]: I0203 07:48:02.115724 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-5fffdc6c76-m5s5d_e7212cfb-233f-4a09-ae76-fcfe61a4ed14/init/0.log" Feb 03 07:48:02 crc kubenswrapper[4708]: I0203 07:48:02.140269 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-5fffdc6c76-m5s5d_e7212cfb-233f-4a09-ae76-fcfe61a4ed14/ironic-api-log/0.log" Feb 03 07:48:02 crc kubenswrapper[4708]: I0203 07:48:02.213429 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-5fffdc6c76-m5s5d_e7212cfb-233f-4a09-ae76-fcfe61a4ed14/ironic-api/0.log" Feb 03 07:48:02 crc kubenswrapper[4708]: I0203 07:48:02.309236 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_361821ae-c957-4e31-bb9b-6d659aaceec4/init/0.log" Feb 03 07:48:02 crc kubenswrapper[4708]: I0203 07:48:02.515099 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_361821ae-c957-4e31-bb9b-6d659aaceec4/init/0.log" Feb 03 07:48:02 crc kubenswrapper[4708]: I0203 07:48:02.533425 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_361821ae-c957-4e31-bb9b-6d659aaceec4/ironic-python-agent-init/0.log" Feb 03 07:48:02 crc kubenswrapper[4708]: I0203 07:48:02.633164 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_361821ae-c957-4e31-bb9b-6d659aaceec4/ironic-python-agent-init/0.log" Feb 03 07:48:02 crc kubenswrapper[4708]: I0203 07:48:02.839124 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_361821ae-c957-4e31-bb9b-6d659aaceec4/init/0.log" Feb 03 07:48:02 crc kubenswrapper[4708]: I0203 07:48:02.859228 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_361821ae-c957-4e31-bb9b-6d659aaceec4/ironic-python-agent-init/0.log" Feb 03 07:48:03 crc kubenswrapper[4708]: I0203 07:48:03.182405 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_361821ae-c957-4e31-bb9b-6d659aaceec4/pxe-init/0.log" Feb 03 07:48:03 crc kubenswrapper[4708]: I0203 07:48:03.261909 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_361821ae-c957-4e31-bb9b-6d659aaceec4/init/0.log" Feb 03 07:48:03 crc kubenswrapper[4708]: I0203 07:48:03.353624 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_361821ae-c957-4e31-bb9b-6d659aaceec4/ironic-python-agent-init/0.log" Feb 03 07:48:03 crc kubenswrapper[4708]: I0203 07:48:03.520192 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_361821ae-c957-4e31-bb9b-6d659aaceec4/httpboot/0.log" Feb 03 07:48:03 crc kubenswrapper[4708]: I0203 07:48:03.753403 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_361821ae-c957-4e31-bb9b-6d659aaceec4/ironic-conductor/0.log" Feb 
03 07:48:03 crc kubenswrapper[4708]: I0203 07:48:03.905061 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_361821ae-c957-4e31-bb9b-6d659aaceec4/ramdisk-logs/0.log" Feb 03 07:48:04 crc kubenswrapper[4708]: I0203 07:48:04.185735 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_361821ae-c957-4e31-bb9b-6d659aaceec4/pxe-init/0.log" Feb 03 07:48:04 crc kubenswrapper[4708]: I0203 07:48:04.219141 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-db-sync-8w6c7_ec5c8613-c88b-4cc5-8ad4-440e65523618/init/0.log" Feb 03 07:48:04 crc kubenswrapper[4708]: I0203 07:48:04.352507 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_361821ae-c957-4e31-bb9b-6d659aaceec4/pxe-init/0.log" Feb 03 07:48:04 crc kubenswrapper[4708]: I0203 07:48:04.380275 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-db-sync-8w6c7_ec5c8613-c88b-4cc5-8ad4-440e65523618/init/0.log" Feb 03 07:48:04 crc kubenswrapper[4708]: I0203 07:48:04.405087 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-db-sync-8w6c7_ec5c8613-c88b-4cc5-8ad4-440e65523618/ironic-db-sync/0.log" Feb 03 07:48:04 crc kubenswrapper[4708]: I0203 07:48:04.573657 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_2365ac26-e49a-4ab3-8781-20c1b697b51d/ironic-python-agent-init/0.log" Feb 03 07:48:04 crc kubenswrapper[4708]: I0203 07:48:04.629569 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-conductor-0_361821ae-c957-4e31-bb9b-6d659aaceec4/pxe-init/0.log" Feb 03 07:48:04 crc kubenswrapper[4708]: I0203 07:48:04.714392 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_2365ac26-e49a-4ab3-8781-20c1b697b51d/ironic-python-agent-init/0.log" Feb 03 07:48:04 crc kubenswrapper[4708]: I0203 07:48:04.735173 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_2365ac26-e49a-4ab3-8781-20c1b697b51d/inspector-pxe-init/0.log" Feb 03 07:48:04 crc kubenswrapper[4708]: I0203 07:48:04.735580 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_2365ac26-e49a-4ab3-8781-20c1b697b51d/inspector-pxe-init/0.log" Feb 03 07:48:04 crc kubenswrapper[4708]: I0203 07:48:04.938833 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_2365ac26-e49a-4ab3-8781-20c1b697b51d/ironic-python-agent-init/0.log" Feb 03 07:48:04 crc kubenswrapper[4708]: I0203 07:48:04.955154 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_2365ac26-e49a-4ab3-8781-20c1b697b51d/inspector-pxe-init/0.log" Feb 03 07:48:04 crc kubenswrapper[4708]: I0203 07:48:04.959259 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_2365ac26-e49a-4ab3-8781-20c1b697b51d/ironic-inspector/0.log" Feb 03 07:48:04 crc kubenswrapper[4708]: I0203 07:48:04.960774 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_2365ac26-e49a-4ab3-8781-20c1b697b51d/inspector-httpboot/0.log" Feb 03 07:48:04 crc kubenswrapper[4708]: I0203 07:48:04.983681 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_2365ac26-e49a-4ab3-8781-20c1b697b51d/ironic-inspector/1.log" Feb 03 07:48:05 crc kubenswrapper[4708]: I0203 07:48:05.163777 4708 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_ironic-inspector-db-sync-v45nm_74d486e0-cafe-4001-a817-dea3959bb928/ironic-inspector-db-sync/0.log" Feb 03 07:48:05 crc kubenswrapper[4708]: I0203 07:48:05.202297 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_2365ac26-e49a-4ab3-8781-20c1b697b51d/ironic-inspector-httpd/0.log" Feb 03 07:48:05 crc kubenswrapper[4708]: I0203 07:48:05.234368 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-inspector-0_2365ac26-e49a-4ab3-8781-20c1b697b51d/ramdisk-logs/0.log" Feb 03 07:48:05 crc kubenswrapper[4708]: I0203 07:48:05.333293 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-neutron-agent-95b7948fb-x2nkv_aeb72dfd-3f7b-41fa-882f-3290c463fcbe/ironic-neutron-agent/2.log" Feb 03 07:48:05 crc kubenswrapper[4708]: I0203 07:48:05.384321 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ironic-neutron-agent-95b7948fb-x2nkv_aeb72dfd-3f7b-41fa-882f-3290c463fcbe/ironic-neutron-agent/1.log" Feb 03 07:48:05 crc kubenswrapper[4708]: I0203 07:48:05.556451 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_e4d6ad9e-1d8f-4d13-a3ae-6e3a283fc697/kube-state-metrics/0.log" Feb 03 07:48:05 crc kubenswrapper[4708]: I0203 07:48:05.642647 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-7ddbc898b8-cqt5j_11f8a75b-7b47-4838-9751-5a03516154e7/keystone-api/0.log" Feb 03 07:48:05 crc kubenswrapper[4708]: I0203 07:48:05.923349 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-59974d4f4f-tvqgn_49513617-6e38-4ae1-ae96-b74bf325d19a/neutron-api/0.log" Feb 03 07:48:05 crc kubenswrapper[4708]: I0203 07:48:05.937665 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-59974d4f4f-tvqgn_49513617-6e38-4ae1-ae96-b74bf325d19a/neutron-httpd/0.log" Feb 03 07:48:06 crc kubenswrapper[4708]: I0203 07:48:06.284768 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_0dead86a-ae50-4e2d-b917-c23cf0a6bf6c/nova-api-log/0.log" Feb 03 07:48:06 crc kubenswrapper[4708]: I0203 07:48:06.372421 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_58ec9777-9aec-4d92-a3a5-6266f6288046/nova-cell0-conductor-conductor/0.log" Feb 03 07:48:06 crc kubenswrapper[4708]: I0203 07:48:06.412699 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_0dead86a-ae50-4e2d-b917-c23cf0a6bf6c/nova-api-api/0.log" Feb 03 07:48:06 crc kubenswrapper[4708]: I0203 07:48:06.657868 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_209b7389-309a-47b9-bc02-7f7567848b8f/nova-cell1-conductor-conductor/0.log" Feb 03 07:48:06 crc kubenswrapper[4708]: I0203 07:48:06.702805 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_fde60da0-56e9-4d52-b602-8060c10dfb5a/nova-cell1-novncproxy-novncproxy/0.log" Feb 03 07:48:06 crc kubenswrapper[4708]: I0203 07:48:06.957757 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_999ecc72-71e3-4f11-910a-27bd07aa4a05/nova-metadata-log/0.log" Feb 03 07:48:07 crc kubenswrapper[4708]: I0203 07:48:07.155966 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_8484d145-abd4-4112-b81c-338bf4d9285f/mysql-bootstrap/0.log" Feb 03 07:48:07 crc kubenswrapper[4708]: I0203 07:48:07.185456 4708 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_5f68c9ee-6d86-4dad-b9cb-1a22c7afd031/nova-scheduler-scheduler/0.log" Feb 03 07:48:07 crc kubenswrapper[4708]: I0203 07:48:07.423516 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_8484d145-abd4-4112-b81c-338bf4d9285f/galera/0.log" Feb 03 07:48:07 crc kubenswrapper[4708]: I0203 07:48:07.433041 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_8484d145-abd4-4112-b81c-338bf4d9285f/mysql-bootstrap/0.log" Feb 03 07:48:07 crc kubenswrapper[4708]: I0203 07:48:07.446099 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_999ecc72-71e3-4f11-910a-27bd07aa4a05/nova-metadata-metadata/0.log" Feb 03 07:48:07 crc kubenswrapper[4708]: I0203 07:48:07.617138 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_5f6fa285-4374-4be5-b4cf-e3dd8ef56762/mysql-bootstrap/0.log" Feb 03 07:48:07 crc kubenswrapper[4708]: I0203 07:48:07.783502 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_db624ad8-1c0f-4100-b3a2-4c80e02c1b03/openstackclient/0.log" Feb 03 07:48:07 crc kubenswrapper[4708]: I0203 07:48:07.857128 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_5f6fa285-4374-4be5-b4cf-e3dd8ef56762/mysql-bootstrap/0.log" Feb 03 07:48:07 crc kubenswrapper[4708]: I0203 07:48:07.924638 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_5f6fa285-4374-4be5-b4cf-e3dd8ef56762/galera/0.log" Feb 03 07:48:08 crc kubenswrapper[4708]: I0203 07:48:08.028179 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-44cl7_2acc0e69-490c-4b5c-8486-bf0fd3fb6316/openstack-network-exporter/0.log" Feb 03 07:48:08 crc kubenswrapper[4708]: I0203 07:48:08.188254 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-48bcs_6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024/ovsdb-server-init/0.log" Feb 03 07:48:08 crc kubenswrapper[4708]: I0203 07:48:08.428491 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-48bcs_6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024/ovsdb-server/0.log" Feb 03 07:48:08 crc kubenswrapper[4708]: I0203 07:48:08.443665 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-48bcs_6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024/ovsdb-server-init/0.log" Feb 03 07:48:08 crc kubenswrapper[4708]: I0203 07:48:08.460330 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-48bcs_6e5c2bfa-fda1-419b-8d9c-7e9b4e7c9024/ovs-vswitchd/0.log" Feb 03 07:48:08 crc kubenswrapper[4708]: I0203 07:48:08.612531 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-pb4xp_3b5a2d58-5ebb-4838-a798-bc280fe99951/ovn-controller/0.log" Feb 03 07:48:08 crc kubenswrapper[4708]: I0203 07:48:08.630489 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_d1a2b7e5-23d7-48f6-b144-d575da1e613d/openstack-network-exporter/0.log" Feb 03 07:48:08 crc kubenswrapper[4708]: I0203 07:48:08.738713 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_d1a2b7e5-23d7-48f6-b144-d575da1e613d/ovn-northd/0.log" Feb 03 07:48:08 crc kubenswrapper[4708]: I0203 07:48:08.858349 4708 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovsdbserver-nb-0_6c70c9bb-deb5-45aa-96e6-aea4e711f93a/openstack-network-exporter/0.log" Feb 03 07:48:08 crc kubenswrapper[4708]: I0203 07:48:08.915595 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_6c70c9bb-deb5-45aa-96e6-aea4e711f93a/ovsdbserver-nb/0.log" Feb 03 07:48:09 crc kubenswrapper[4708]: I0203 07:48:09.102281 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_d09d6786-b507-4848-977f-a5e94b77d0ad/openstack-network-exporter/0.log" Feb 03 07:48:09 crc kubenswrapper[4708]: I0203 07:48:09.140461 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_d09d6786-b507-4848-977f-a5e94b77d0ad/ovsdbserver-sb/0.log" Feb 03 07:48:09 crc kubenswrapper[4708]: I0203 07:48:09.243125 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-67599f68dd-cgvwn_11e8be85-5666-4e3d-8964-b0d554d5b1ef/placement-api/0.log" Feb 03 07:48:09 crc kubenswrapper[4708]: I0203 07:48:09.381528 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_a1eb365e-2bf1-450f-90ae-5ca8f2de2de6/setup-container/0.log" Feb 03 07:48:09 crc kubenswrapper[4708]: I0203 07:48:09.427186 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-67599f68dd-cgvwn_11e8be85-5666-4e3d-8964-b0d554d5b1ef/placement-log/0.log" Feb 03 07:48:09 crc kubenswrapper[4708]: I0203 07:48:09.575983 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_a1eb365e-2bf1-450f-90ae-5ca8f2de2de6/setup-container/0.log" Feb 03 07:48:09 crc kubenswrapper[4708]: I0203 07:48:09.670672 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_baf187c5-9fe4-4496-8f70-ac916d0bb075/setup-container/0.log" Feb 03 07:48:09 crc kubenswrapper[4708]: I0203 07:48:09.690729 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_a1eb365e-2bf1-450f-90ae-5ca8f2de2de6/rabbitmq/0.log" Feb 03 07:48:09 crc kubenswrapper[4708]: I0203 07:48:09.921297 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_baf187c5-9fe4-4496-8f70-ac916d0bb075/setup-container/0.log" Feb 03 07:48:10 crc kubenswrapper[4708]: I0203 07:48:10.011654 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_baf187c5-9fe4-4496-8f70-ac916d0bb075/rabbitmq/0.log" Feb 03 07:48:10 crc kubenswrapper[4708]: I0203 07:48:10.084438 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-85b58fb76c-jldbq_7e6d6f02-2176-4c8f-93c4-cb78832fc2d3/proxy-httpd/0.log" Feb 03 07:48:10 crc kubenswrapper[4708]: I0203 07:48:10.204728 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-85b58fb76c-jldbq_7e6d6f02-2176-4c8f-93c4-cb78832fc2d3/proxy-server/0.log" Feb 03 07:48:10 crc kubenswrapper[4708]: I0203 07:48:10.251360 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-zljlj_fdec39a4-6222-4122-901f-4a6603afc348/swift-ring-rebalance/0.log" Feb 03 07:48:10 crc kubenswrapper[4708]: I0203 07:48:10.417419 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a0593ff7-ba15-46be-8879-70dc42f3beb2/account-auditor/0.log" Feb 03 07:48:10 crc kubenswrapper[4708]: I0203 07:48:10.521966 4708 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_a0593ff7-ba15-46be-8879-70dc42f3beb2/account-replicator/0.log" Feb 03 07:48:10 crc kubenswrapper[4708]: I0203 07:48:10.535129 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a0593ff7-ba15-46be-8879-70dc42f3beb2/account-reaper/0.log" Feb 03 07:48:10 crc kubenswrapper[4708]: I0203 07:48:10.610928 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a0593ff7-ba15-46be-8879-70dc42f3beb2/account-server/0.log" Feb 03 07:48:10 crc kubenswrapper[4708]: I0203 07:48:10.681968 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a0593ff7-ba15-46be-8879-70dc42f3beb2/container-auditor/0.log" Feb 03 07:48:10 crc kubenswrapper[4708]: I0203 07:48:10.749397 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a0593ff7-ba15-46be-8879-70dc42f3beb2/container-replicator/0.log" Feb 03 07:48:10 crc kubenswrapper[4708]: I0203 07:48:10.799061 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a0593ff7-ba15-46be-8879-70dc42f3beb2/container-server/0.log" Feb 03 07:48:10 crc kubenswrapper[4708]: I0203 07:48:10.859968 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a0593ff7-ba15-46be-8879-70dc42f3beb2/container-updater/0.log" Feb 03 07:48:10 crc kubenswrapper[4708]: I0203 07:48:10.941393 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a0593ff7-ba15-46be-8879-70dc42f3beb2/object-auditor/0.log" Feb 03 07:48:11 crc kubenswrapper[4708]: I0203 07:48:11.013689 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a0593ff7-ba15-46be-8879-70dc42f3beb2/object-replicator/0.log" Feb 03 07:48:11 crc kubenswrapper[4708]: I0203 07:48:11.052869 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a0593ff7-ba15-46be-8879-70dc42f3beb2/object-expirer/0.log" Feb 03 07:48:11 crc kubenswrapper[4708]: I0203 07:48:11.080038 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a0593ff7-ba15-46be-8879-70dc42f3beb2/object-server/0.log" Feb 03 07:48:11 crc kubenswrapper[4708]: I0203 07:48:11.116037 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a0593ff7-ba15-46be-8879-70dc42f3beb2/object-updater/0.log" Feb 03 07:48:11 crc kubenswrapper[4708]: I0203 07:48:11.264729 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a0593ff7-ba15-46be-8879-70dc42f3beb2/swift-recon-cron/0.log" Feb 03 07:48:11 crc kubenswrapper[4708]: I0203 07:48:11.298102 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_a0593ff7-ba15-46be-8879-70dc42f3beb2/rsync/0.log" Feb 03 07:48:16 crc kubenswrapper[4708]: I0203 07:48:16.260707 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_98eee8d5-f15e-4add-86d3-d19f15018230/memcached/0.log" Feb 03 07:48:33 crc kubenswrapper[4708]: I0203 07:48:33.617236 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg_93b116c1-740f-430c-bb44-20ffc67925f0/util/0.log" Feb 03 07:48:33 crc kubenswrapper[4708]: I0203 07:48:33.796734 4708 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg_93b116c1-740f-430c-bb44-20ffc67925f0/util/0.log" Feb 03 07:48:33 crc kubenswrapper[4708]: I0203 07:48:33.821339 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg_93b116c1-740f-430c-bb44-20ffc67925f0/pull/0.log" Feb 03 07:48:33 crc kubenswrapper[4708]: I0203 07:48:33.869115 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg_93b116c1-740f-430c-bb44-20ffc67925f0/pull/0.log" Feb 03 07:48:34 crc kubenswrapper[4708]: I0203 07:48:34.106158 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg_93b116c1-740f-430c-bb44-20ffc67925f0/extract/0.log" Feb 03 07:48:34 crc kubenswrapper[4708]: I0203 07:48:34.110456 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg_93b116c1-740f-430c-bb44-20ffc67925f0/util/0.log" Feb 03 07:48:34 crc kubenswrapper[4708]: I0203 07:48:34.111225 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_63378dc21729f3d32b6e530e80ca35a0ef8bb7a2083eb5a2c98b94aa76qg8gg_93b116c1-740f-430c-bb44-20ffc67925f0/pull/0.log" Feb 03 07:48:34 crc kubenswrapper[4708]: I0203 07:48:34.324549 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-fc589b45f-mqk95_fe83b4e1-7562-495b-99bc-aa5d1202881c/manager/0.log" Feb 03 07:48:34 crc kubenswrapper[4708]: I0203 07:48:34.596707 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-8f4c5cb64-d2ddp_bdd317ff-3849-4a28-9640-dd4611b86599/manager/0.log" Feb 03 07:48:34 crc kubenswrapper[4708]: I0203 07:48:34.700468 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5d77f4dbc9-7vww8_0b3f5149-6624-450b-b3bd-be0d0ca78c73/manager/0.log" Feb 03 07:48:34 crc kubenswrapper[4708]: I0203 07:48:34.854622 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-65dc6c8d9c-bgm44_794426b0-cf19-43ff-957e-3413c77f0570/manager/0.log" Feb 03 07:48:35 crc kubenswrapper[4708]: I0203 07:48:35.035972 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-5fb775575f-gqvwr_35c2f81b-a6df-4f5c-98c9-e9efb7f362b4/manager/0.log" Feb 03 07:48:35 crc kubenswrapper[4708]: I0203 07:48:35.418678 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-866f9bb544-m4775_f1edcba9-46e3-49fd-bb48-ba29b86c7bac/manager/0.log" Feb 03 07:48:35 crc kubenswrapper[4708]: I0203 07:48:35.450304 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-5d86df5cd7-2pljs_c6a27492-3276-45de-a2d9-1c605152a0b6/manager/0.log" Feb 03 07:48:35 crc kubenswrapper[4708]: I0203 07:48:35.579753 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-79955696d6-btqlz_9f166dd2-52e4-473c-9168-c065582fa0e4/manager/0.log" Feb 03 07:48:35 crc kubenswrapper[4708]: I0203 07:48:35.674122 4708 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-64469b487f-kccqb_3e86abc8-c97c-4eef-b181-0d87376edd8f/manager/0.log" Feb 03 07:48:35 crc kubenswrapper[4708]: I0203 07:48:35.736807 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7775d87d9d-gpdwp_d6260b8b-c5f5-4803-8305-0b14903926c9/manager/0.log" Feb 03 07:48:35 crc kubenswrapper[4708]: I0203 07:48:35.890676 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-67bf948998-gl2lx_3b0ccfa3-4ef3-4e3c-9127-59e1abc6631d/manager/0.log" Feb 03 07:48:35 crc kubenswrapper[4708]: I0203 07:48:35.987501 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-576995988b-tdxnl_7d3cec4a-da6f-431a-98d7-c4784bb248bc/manager/0.log" Feb 03 07:48:36 crc kubenswrapper[4708]: I0203 07:48:36.265644 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-5644b66645-m7mbt_802333ba-2384-4688-b939-28cbfda8bfc1/manager/0.log" Feb 03 07:48:36 crc kubenswrapper[4708]: I0203 07:48:36.374181 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-59c4b45c4dkvr62_f6f38306-d4b2-46fa-9c49-8ac276362db8/manager/0.log" Feb 03 07:48:36 crc kubenswrapper[4708]: I0203 07:48:36.769571 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-init-d8cb6cd6c-cknw2_bc5c0cc4-1640-4e46-86b7-ed4ce809d4aa/operator/0.log" Feb 03 07:48:37 crc kubenswrapper[4708]: I0203 07:48:37.060975 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-pdxl8_a6e65791-8332-41fb-aac6-d17c3ac9d6f6/registry-server/0.log" Feb 03 07:48:37 crc kubenswrapper[4708]: I0203 07:48:37.333687 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-788c46999f-mlxcw_6cee24b4-302f-48db-badb-39bcab5756d9/manager/0.log" Feb 03 07:48:37 crc kubenswrapper[4708]: I0203 07:48:37.464621 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7b89ddb58-x6prm_240226bb-f320-4bd5-87ad-1d219c9e61e7/manager/0.log" Feb 03 07:48:37 crc kubenswrapper[4708]: I0203 07:48:37.584257 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5b964cf4cd-k5zbb_7d53946e-45e4-4abe-b4e7-d64339fdedd3/manager/0.log" Feb 03 07:48:37 crc kubenswrapper[4708]: I0203 07:48:37.835028 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-mtgqj_3841da74-e9f4-4f19-ae3c-66e117029c51/operator/0.log" Feb 03 07:48:37 crc kubenswrapper[4708]: I0203 07:48:37.909736 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-79f7df8fc4-bhfdd_b5a7b398-66a9-4c39-a940-631bcc804dfe/manager/0.log" Feb 03 07:48:37 crc kubenswrapper[4708]: I0203 07:48:37.951100 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-7b89fdf75b-94vx4_ac5a5419-6887-45ea-944d-1c8f51816492/manager/0.log" Feb 03 07:48:38 crc kubenswrapper[4708]: I0203 07:48:38.077990 4708 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_test-operator-controller-manager-56f8bfcd9f-rxvkg_1d0931b6-6d69-4702-9b8c-93f1a6600bbe/manager/0.log" Feb 03 07:48:38 crc kubenswrapper[4708]: I0203 07:48:38.133644 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-565849b54-rwgmc_faade3fc-fd45-4bcf-8aa5-0b0a3765581f/manager/0.log" Feb 03 07:48:38 crc kubenswrapper[4708]: I0203 07:48:38.268551 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-586b95b788-gldzv_5c9c90e2-345b-4a13-9acc-6e4d98113779/manager/0.log" Feb 03 07:48:56 crc kubenswrapper[4708]: I0203 07:48:56.566013 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-p5qw9_915701ce-919a-4743-b390-fa72105516e1/control-plane-machine-set-operator/0.log" Feb 03 07:48:56 crc kubenswrapper[4708]: I0203 07:48:56.743353 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-9fjv2_a34c5792-5895-4d08-9e7e-b3948f5be096/kube-rbac-proxy/0.log" Feb 03 07:48:56 crc kubenswrapper[4708]: I0203 07:48:56.777409 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-9fjv2_a34c5792-5895-4d08-9e7e-b3948f5be096/machine-api-operator/0.log" Feb 03 07:49:08 crc kubenswrapper[4708]: I0203 07:49:08.546813 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-858654f9db-jpxrc_59a903ed-9dad-488c-b531-cbe96052d31b/cert-manager-controller/0.log" Feb 03 07:49:08 crc kubenswrapper[4708]: I0203 07:49:08.706613 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-cf98fcc89-xfwvc_4df04764-d566-42a4-b7f4-af82a04b3fc3/cert-manager-cainjector/0.log" Feb 03 07:49:08 crc kubenswrapper[4708]: I0203 07:49:08.713972 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-687f57d79b-j4njw_4b1cdb17-b07e-4d3a-86fd-418361057f9d/cert-manager-webhook/0.log" Feb 03 07:49:20 crc kubenswrapper[4708]: I0203 07:49:20.796997 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7754f76f8b-nwm6n_5c6dbe91-1ee1-4629-bbee-e661af990956/nmstate-console-plugin/0.log" Feb 03 07:49:20 crc kubenswrapper[4708]: I0203 07:49:20.978920 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-ls24k_09450ee3-4732-4c81-8bf1-cca9c8d8fdc6/nmstate-handler/0.log" Feb 03 07:49:20 crc kubenswrapper[4708]: I0203 07:49:20.991707 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-vrkbt_f3e72a2c-73aa-410e-8386-1a2e6b510d4f/kube-rbac-proxy/0.log" Feb 03 07:49:21 crc kubenswrapper[4708]: I0203 07:49:21.096244 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-vrkbt_f3e72a2c-73aa-410e-8386-1a2e6b510d4f/nmstate-metrics/0.log" Feb 03 07:49:21 crc kubenswrapper[4708]: I0203 07:49:21.192408 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-646758c888-jx7x5_3396f4c1-fb82-428b-bde6-0f30b8bf6c59/nmstate-operator/0.log" Feb 03 07:49:21 crc kubenswrapper[4708]: I0203 07:49:21.298283 4708 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-webhook-8474b5b9d8-b725x_4bcad1f4-a07d-4a93-8b5b-b6df72d2e34e/nmstate-webhook/0.log" Feb 03 07:49:23 crc kubenswrapper[4708]: I0203 07:49:23.835448 4708 patch_prober.go:28] interesting pod/machine-config-daemon-r94bn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:49:23 crc kubenswrapper[4708]: I0203 07:49:23.835785 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:49:46 crc kubenswrapper[4708]: I0203 07:49:46.071292 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-cbjxc_fae1d180-0dfb-4f07-922c-5b158d2ebcd3/kube-rbac-proxy/0.log" Feb 03 07:49:46 crc kubenswrapper[4708]: I0203 07:49:46.163592 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-cbjxc_fae1d180-0dfb-4f07-922c-5b158d2ebcd3/controller/0.log" Feb 03 07:49:46 crc kubenswrapper[4708]: I0203 07:49:46.287375 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/cp-frr-files/0.log" Feb 03 07:49:46 crc kubenswrapper[4708]: I0203 07:49:46.490930 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/cp-frr-files/0.log" Feb 03 07:49:46 crc kubenswrapper[4708]: I0203 07:49:46.521380 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/cp-reloader/0.log" Feb 03 07:49:46 crc kubenswrapper[4708]: I0203 07:49:46.563437 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/cp-reloader/0.log" Feb 03 07:49:46 crc kubenswrapper[4708]: I0203 07:49:46.569700 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/cp-metrics/0.log" Feb 03 07:49:46 crc kubenswrapper[4708]: I0203 07:49:46.704506 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/cp-frr-files/0.log" Feb 03 07:49:46 crc kubenswrapper[4708]: I0203 07:49:46.756420 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/cp-metrics/0.log" Feb 03 07:49:46 crc kubenswrapper[4708]: I0203 07:49:46.771169 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/cp-metrics/0.log" Feb 03 07:49:46 crc kubenswrapper[4708]: I0203 07:49:46.774241 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/cp-reloader/0.log" Feb 03 07:49:46 crc kubenswrapper[4708]: I0203 07:49:46.934609 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/cp-metrics/0.log" Feb 03 07:49:46 crc kubenswrapper[4708]: I0203 07:49:46.934846 4708 log.go:25] "Finished parsing log 
file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/cp-frr-files/0.log" Feb 03 07:49:46 crc kubenswrapper[4708]: I0203 07:49:46.936355 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/cp-reloader/0.log" Feb 03 07:49:46 crc kubenswrapper[4708]: I0203 07:49:46.979378 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/controller/0.log" Feb 03 07:49:47 crc kubenswrapper[4708]: I0203 07:49:47.135156 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/frr-metrics/0.log" Feb 03 07:49:47 crc kubenswrapper[4708]: I0203 07:49:47.138055 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/kube-rbac-proxy/0.log" Feb 03 07:49:47 crc kubenswrapper[4708]: I0203 07:49:47.180401 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/kube-rbac-proxy-frr/0.log" Feb 03 07:49:47 crc kubenswrapper[4708]: I0203 07:49:47.375280 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-xs54b_20c5389c-b542-4620-ac99-0ecfb0ae7720/frr-k8s-webhook-server/0.log" Feb 03 07:49:47 crc kubenswrapper[4708]: I0203 07:49:47.425851 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/reloader/0.log" Feb 03 07:49:47 crc kubenswrapper[4708]: I0203 07:49:47.615752 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-7f7b788fc7-dmpn2_c67268c0-d17c-4659-829e-2865b70963f0/manager/0.log" Feb 03 07:49:47 crc kubenswrapper[4708]: I0203 07:49:47.785513 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-7d65755cd8-mqk5v_85431c28-b637-4bab-b63b-982307ac860c/webhook-server/0.log" Feb 03 07:49:47 crc kubenswrapper[4708]: I0203 07:49:47.946639 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-hcz88_31740186-1408-414d-86ee-66b5f2219175/kube-rbac-proxy/0.log" Feb 03 07:49:48 crc kubenswrapper[4708]: I0203 07:49:48.165837 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-cd7sq_a0d5231d-187e-49dd-8f33-12613dd34c3b/frr/0.log" Feb 03 07:49:48 crc kubenswrapper[4708]: I0203 07:49:48.402124 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-hcz88_31740186-1408-414d-86ee-66b5f2219175/speaker/0.log" Feb 03 07:49:53 crc kubenswrapper[4708]: I0203 07:49:53.833862 4708 patch_prober.go:28] interesting pod/machine-config-daemon-r94bn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:49:53 crc kubenswrapper[4708]: I0203 07:49:53.834494 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:49:56 crc kubenswrapper[4708]: 
Feb 03 07:50:00 crc kubenswrapper[4708]: I0203 07:50:00.725499 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf_402c1015-f80b-44cf-aab2-afd529531cfd/util/0.log"
Feb 03 07:50:00 crc kubenswrapper[4708]: I0203 07:50:00.971280 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf_402c1015-f80b-44cf-aab2-afd529531cfd/pull/0.log"
Feb 03 07:50:00 crc kubenswrapper[4708]: I0203 07:50:00.990070 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf_402c1015-f80b-44cf-aab2-afd529531cfd/util/0.log"
Feb 03 07:50:01 crc kubenswrapper[4708]: I0203 07:50:01.022233 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf_402c1015-f80b-44cf-aab2-afd529531cfd/pull/0.log"
Feb 03 07:50:01 crc kubenswrapper[4708]: I0203 07:50:01.188210 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf_402c1015-f80b-44cf-aab2-afd529531cfd/util/0.log"
Feb 03 07:50:01 crc kubenswrapper[4708]: I0203 07:50:01.241825 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf_402c1015-f80b-44cf-aab2-afd529531cfd/pull/0.log"
Feb 03 07:50:01 crc kubenswrapper[4708]: I0203 07:50:01.242230 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dchxxbf_402c1015-f80b-44cf-aab2-afd529531cfd/extract/0.log"
Feb 03 07:50:01 crc kubenswrapper[4708]: I0203 07:50:01.380877 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n_20d02b2d-b83b-4dcf-ac9d-bffece1d430c/util/0.log"
Feb 03 07:50:01 crc kubenswrapper[4708]: I0203 07:50:01.549507 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n_20d02b2d-b83b-4dcf-ac9d-bffece1d430c/pull/0.log"
Feb 03 07:50:01 crc kubenswrapper[4708]: I0203 07:50:01.564605 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n_20d02b2d-b83b-4dcf-ac9d-bffece1d430c/util/0.log"
Feb 03 07:50:01 crc kubenswrapper[4708]: I0203 07:50:01.567406 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n_20d02b2d-b83b-4dcf-ac9d-bffece1d430c/pull/0.log"
Feb 03 07:50:01 crc kubenswrapper[4708]: I0203 07:50:01.786004 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n_20d02b2d-b83b-4dcf-ac9d-bffece1d430c/util/0.log"
Feb 03 07:50:01 crc kubenswrapper[4708]: I0203 07:50:01.804592 4708 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n_20d02b2d-b83b-4dcf-ac9d-bffece1d430c/pull/0.log" Feb 03 07:50:01 crc kubenswrapper[4708]: I0203 07:50:01.808031 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713tqj8n_20d02b2d-b83b-4dcf-ac9d-bffece1d430c/extract/0.log" Feb 03 07:50:02 crc kubenswrapper[4708]: I0203 07:50:02.005066 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9glc4_849de51a-5755-4905-8627-1cc76e9e4647/extract-utilities/0.log" Feb 03 07:50:02 crc kubenswrapper[4708]: I0203 07:50:02.186689 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9glc4_849de51a-5755-4905-8627-1cc76e9e4647/extract-utilities/0.log" Feb 03 07:50:02 crc kubenswrapper[4708]: I0203 07:50:02.231353 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9glc4_849de51a-5755-4905-8627-1cc76e9e4647/extract-content/0.log" Feb 03 07:50:02 crc kubenswrapper[4708]: I0203 07:50:02.253713 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9glc4_849de51a-5755-4905-8627-1cc76e9e4647/extract-content/0.log" Feb 03 07:50:02 crc kubenswrapper[4708]: I0203 07:50:02.432279 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9glc4_849de51a-5755-4905-8627-1cc76e9e4647/extract-content/0.log" Feb 03 07:50:02 crc kubenswrapper[4708]: I0203 07:50:02.433714 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9glc4_849de51a-5755-4905-8627-1cc76e9e4647/extract-utilities/0.log" Feb 03 07:50:02 crc kubenswrapper[4708]: I0203 07:50:02.661250 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-zkk2w_f881ea5f-3c53-4524-8999-6ecbfaf5dfef/extract-utilities/0.log" Feb 03 07:50:02 crc kubenswrapper[4708]: I0203 07:50:02.731092 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9glc4_849de51a-5755-4905-8627-1cc76e9e4647/registry-server/0.log" Feb 03 07:50:02 crc kubenswrapper[4708]: I0203 07:50:02.927052 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-zkk2w_f881ea5f-3c53-4524-8999-6ecbfaf5dfef/extract-content/0.log" Feb 03 07:50:02 crc kubenswrapper[4708]: I0203 07:50:02.948553 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-zkk2w_f881ea5f-3c53-4524-8999-6ecbfaf5dfef/extract-utilities/0.log" Feb 03 07:50:02 crc kubenswrapper[4708]: I0203 07:50:02.965345 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-zkk2w_f881ea5f-3c53-4524-8999-6ecbfaf5dfef/extract-content/0.log" Feb 03 07:50:03 crc kubenswrapper[4708]: I0203 07:50:03.103021 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-zkk2w_f881ea5f-3c53-4524-8999-6ecbfaf5dfef/extract-utilities/0.log" Feb 03 07:50:03 crc kubenswrapper[4708]: I0203 07:50:03.140833 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-zkk2w_f881ea5f-3c53-4524-8999-6ecbfaf5dfef/extract-content/0.log" Feb 03 07:50:03 crc kubenswrapper[4708]: I0203 07:50:03.329672 4708 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-hs7h6_760c2ebf-e516-4db6-a500-d2b897cc96de/marketplace-operator/0.log" Feb 03 07:50:03 crc kubenswrapper[4708]: I0203 07:50:03.510079 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zh966_9a265481-0560-4311-8886-1e3a833e487d/extract-utilities/0.log" Feb 03 07:50:03 crc kubenswrapper[4708]: I0203 07:50:03.523965 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-zkk2w_f881ea5f-3c53-4524-8999-6ecbfaf5dfef/registry-server/0.log" Feb 03 07:50:03 crc kubenswrapper[4708]: I0203 07:50:03.676545 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zh966_9a265481-0560-4311-8886-1e3a833e487d/extract-utilities/0.log" Feb 03 07:50:03 crc kubenswrapper[4708]: I0203 07:50:03.702686 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zh966_9a265481-0560-4311-8886-1e3a833e487d/extract-content/0.log" Feb 03 07:50:03 crc kubenswrapper[4708]: I0203 07:50:03.743523 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zh966_9a265481-0560-4311-8886-1e3a833e487d/extract-content/0.log" Feb 03 07:50:03 crc kubenswrapper[4708]: I0203 07:50:03.916679 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zh966_9a265481-0560-4311-8886-1e3a833e487d/extract-utilities/0.log" Feb 03 07:50:03 crc kubenswrapper[4708]: I0203 07:50:03.916685 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zh966_9a265481-0560-4311-8886-1e3a833e487d/extract-content/0.log" Feb 03 07:50:04 crc kubenswrapper[4708]: I0203 07:50:04.002622 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zh966_9a265481-0560-4311-8886-1e3a833e487d/registry-server/0.log" Feb 03 07:50:04 crc kubenswrapper[4708]: I0203 07:50:04.097405 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rpf9t_26447fe3-af8b-43e8-8aa8-e2e29f5639c1/extract-utilities/0.log" Feb 03 07:50:04 crc kubenswrapper[4708]: I0203 07:50:04.381117 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rpf9t_26447fe3-af8b-43e8-8aa8-e2e29f5639c1/extract-utilities/0.log" Feb 03 07:50:04 crc kubenswrapper[4708]: I0203 07:50:04.390063 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rpf9t_26447fe3-af8b-43e8-8aa8-e2e29f5639c1/extract-content/0.log" Feb 03 07:50:04 crc kubenswrapper[4708]: I0203 07:50:04.413074 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rpf9t_26447fe3-af8b-43e8-8aa8-e2e29f5639c1/extract-content/0.log" Feb 03 07:50:04 crc kubenswrapper[4708]: I0203 07:50:04.562684 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rpf9t_26447fe3-af8b-43e8-8aa8-e2e29f5639c1/extract-content/0.log" Feb 03 07:50:04 crc kubenswrapper[4708]: I0203 07:50:04.578118 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rpf9t_26447fe3-af8b-43e8-8aa8-e2e29f5639c1/extract-utilities/0.log" Feb 03 07:50:04 crc kubenswrapper[4708]: I0203 07:50:04.964938 4708 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rpf9t_26447fe3-af8b-43e8-8aa8-e2e29f5639c1/registry-server/0.log" Feb 03 07:50:23 crc kubenswrapper[4708]: I0203 07:50:23.833600 4708 patch_prober.go:28] interesting pod/machine-config-daemon-r94bn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 03 07:50:23 crc kubenswrapper[4708]: I0203 07:50:23.834207 4708 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 03 07:50:23 crc kubenswrapper[4708]: I0203 07:50:23.834257 4708 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" Feb 03 07:50:23 crc kubenswrapper[4708]: I0203 07:50:23.835687 4708 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7b0b9b8600998b896bb08a1c5daf1fca59cda92e0baecdb8932c8ddaa8aab8b5"} pod="openshift-machine-config-operator/machine-config-daemon-r94bn" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 03 07:50:23 crc kubenswrapper[4708]: I0203 07:50:23.835913 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" containerName="machine-config-daemon" containerID="cri-o://7b0b9b8600998b896bb08a1c5daf1fca59cda92e0baecdb8932c8ddaa8aab8b5" gracePeriod=600 Feb 03 07:50:23 crc kubenswrapper[4708]: E0203 07:50:23.965844 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:50:24 crc kubenswrapper[4708]: I0203 07:50:24.877848 4708 generic.go:334] "Generic (PLEG): container finished" podID="67498414-5132-496e-9638-189f5941ace0" containerID="7b0b9b8600998b896bb08a1c5daf1fca59cda92e0baecdb8932c8ddaa8aab8b5" exitCode=0 Feb 03 07:50:24 crc kubenswrapper[4708]: I0203 07:50:24.877902 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" event={"ID":"67498414-5132-496e-9638-189f5941ace0","Type":"ContainerDied","Data":"7b0b9b8600998b896bb08a1c5daf1fca59cda92e0baecdb8932c8ddaa8aab8b5"} Feb 03 07:50:24 crc kubenswrapper[4708]: I0203 07:50:24.878211 4708 scope.go:117] "RemoveContainer" containerID="e97c1c7a8a378f9a3bb4de9efdba7a03465531ea21b43635da735f593a89eaba" Feb 03 07:50:24 crc kubenswrapper[4708]: I0203 07:50:24.878675 4708 scope.go:117] "RemoveContainer" containerID="7b0b9b8600998b896bb08a1c5daf1fca59cda92e0baecdb8932c8ddaa8aab8b5" Feb 03 07:50:24 crc kubenswrapper[4708]: E0203 07:50:24.878971 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:50:34 crc kubenswrapper[4708]: E0203 07:50:34.680993 4708 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 38.102.83.53:44986->38.102.83.53:41401: read tcp 38.102.83.53:44986->38.102.83.53:41401: read: connection reset by peer Feb 03 07:50:40 crc kubenswrapper[4708]: I0203 07:50:40.095435 4708 scope.go:117] "RemoveContainer" containerID="7b0b9b8600998b896bb08a1c5daf1fca59cda92e0baecdb8932c8ddaa8aab8b5" Feb 03 07:50:40 crc kubenswrapper[4708]: E0203 07:50:40.096262 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:50:54 crc kubenswrapper[4708]: I0203 07:50:54.093194 4708 scope.go:117] "RemoveContainer" containerID="7b0b9b8600998b896bb08a1c5daf1fca59cda92e0baecdb8932c8ddaa8aab8b5" Feb 03 07:50:54 crc kubenswrapper[4708]: E0203 07:50:54.094088 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:51:07 crc kubenswrapper[4708]: I0203 07:51:07.093097 4708 scope.go:117] "RemoveContainer" containerID="7b0b9b8600998b896bb08a1c5daf1fca59cda92e0baecdb8932c8ddaa8aab8b5" Feb 03 07:51:07 crc kubenswrapper[4708]: E0203 07:51:07.094196 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:51:20 crc kubenswrapper[4708]: I0203 07:51:20.093420 4708 scope.go:117] "RemoveContainer" containerID="7b0b9b8600998b896bb08a1c5daf1fca59cda92e0baecdb8932c8ddaa8aab8b5" Feb 03 07:51:20 crc kubenswrapper[4708]: E0203 07:51:20.094296 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:51:32 crc kubenswrapper[4708]: I0203 07:51:32.103310 4708 scope.go:117] "RemoveContainer" containerID="7b0b9b8600998b896bb08a1c5daf1fca59cda92e0baecdb8932c8ddaa8aab8b5" Feb 03 07:51:32 crc kubenswrapper[4708]: E0203 07:51:32.104240 4708 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:51:44 crc kubenswrapper[4708]: I0203 07:51:44.092630 4708 scope.go:117] "RemoveContainer" containerID="7b0b9b8600998b896bb08a1c5daf1fca59cda92e0baecdb8932c8ddaa8aab8b5" Feb 03 07:51:44 crc kubenswrapper[4708]: E0203 07:51:44.093361 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:51:44 crc kubenswrapper[4708]: I0203 07:51:44.657988 4708 generic.go:334] "Generic (PLEG): container finished" podID="c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9" containerID="8d806bb36d4c930d67af2e13bcc70c45896c8702ae579367aa1ac935b34d27f1" exitCode=0 Feb 03 07:51:44 crc kubenswrapper[4708]: I0203 07:51:44.658054 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jqhrb/must-gather-xxf6t" event={"ID":"c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9","Type":"ContainerDied","Data":"8d806bb36d4c930d67af2e13bcc70c45896c8702ae579367aa1ac935b34d27f1"} Feb 03 07:51:44 crc kubenswrapper[4708]: I0203 07:51:44.658769 4708 scope.go:117] "RemoveContainer" containerID="8d806bb36d4c930d67af2e13bcc70c45896c8702ae579367aa1ac935b34d27f1" Feb 03 07:51:45 crc kubenswrapper[4708]: I0203 07:51:45.060017 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-jqhrb_must-gather-xxf6t_c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9/gather/0.log" Feb 03 07:51:55 crc kubenswrapper[4708]: I0203 07:51:55.861813 4708 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-jqhrb/must-gather-xxf6t"] Feb 03 07:51:55 crc kubenswrapper[4708]: I0203 07:51:55.862332 4708 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-jqhrb/must-gather-xxf6t" podUID="c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9" containerName="copy" containerID="cri-o://28be62fbfa4581d87a83c58127014f10c5e264869452d57087d935d92fc40391" gracePeriod=2 Feb 03 07:51:55 crc kubenswrapper[4708]: I0203 07:51:55.871098 4708 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-jqhrb/must-gather-xxf6t"] Feb 03 07:51:56 crc kubenswrapper[4708]: I0203 07:51:56.298066 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-jqhrb_must-gather-xxf6t_c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9/copy/0.log" Feb 03 07:51:56 crc kubenswrapper[4708]: I0203 07:51:56.299156 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jqhrb/must-gather-xxf6t" Feb 03 07:51:56 crc kubenswrapper[4708]: I0203 07:51:56.453466 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9-must-gather-output\") pod \"c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9\" (UID: \"c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9\") " Feb 03 07:51:56 crc kubenswrapper[4708]: I0203 07:51:56.453693 4708 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lwrc8\" (UniqueName: \"kubernetes.io/projected/c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9-kube-api-access-lwrc8\") pod \"c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9\" (UID: \"c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9\") " Feb 03 07:51:56 crc kubenswrapper[4708]: I0203 07:51:56.461041 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9-kube-api-access-lwrc8" (OuterVolumeSpecName: "kube-api-access-lwrc8") pod "c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9" (UID: "c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9"). InnerVolumeSpecName "kube-api-access-lwrc8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 03 07:51:56 crc kubenswrapper[4708]: I0203 07:51:56.555741 4708 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lwrc8\" (UniqueName: \"kubernetes.io/projected/c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9-kube-api-access-lwrc8\") on node \"crc\" DevicePath \"\"" Feb 03 07:51:56 crc kubenswrapper[4708]: I0203 07:51:56.631817 4708 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9" (UID: "c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 03 07:51:56 crc kubenswrapper[4708]: I0203 07:51:56.657140 4708 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9-must-gather-output\") on node \"crc\" DevicePath \"\"" Feb 03 07:51:56 crc kubenswrapper[4708]: I0203 07:51:56.754875 4708 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-jqhrb_must-gather-xxf6t_c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9/copy/0.log" Feb 03 07:51:56 crc kubenswrapper[4708]: I0203 07:51:56.755276 4708 generic.go:334] "Generic (PLEG): container finished" podID="c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9" containerID="28be62fbfa4581d87a83c58127014f10c5e264869452d57087d935d92fc40391" exitCode=143 Feb 03 07:51:56 crc kubenswrapper[4708]: I0203 07:51:56.755327 4708 scope.go:117] "RemoveContainer" containerID="28be62fbfa4581d87a83c58127014f10c5e264869452d57087d935d92fc40391" Feb 03 07:51:56 crc kubenswrapper[4708]: I0203 07:51:56.755340 4708 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jqhrb/must-gather-xxf6t" Feb 03 07:51:56 crc kubenswrapper[4708]: I0203 07:51:56.773845 4708 scope.go:117] "RemoveContainer" containerID="8d806bb36d4c930d67af2e13bcc70c45896c8702ae579367aa1ac935b34d27f1" Feb 03 07:51:56 crc kubenswrapper[4708]: I0203 07:51:56.839097 4708 scope.go:117] "RemoveContainer" containerID="28be62fbfa4581d87a83c58127014f10c5e264869452d57087d935d92fc40391" Feb 03 07:51:56 crc kubenswrapper[4708]: E0203 07:51:56.839731 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28be62fbfa4581d87a83c58127014f10c5e264869452d57087d935d92fc40391\": container with ID starting with 28be62fbfa4581d87a83c58127014f10c5e264869452d57087d935d92fc40391 not found: ID does not exist" containerID="28be62fbfa4581d87a83c58127014f10c5e264869452d57087d935d92fc40391" Feb 03 07:51:56 crc kubenswrapper[4708]: I0203 07:51:56.839783 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28be62fbfa4581d87a83c58127014f10c5e264869452d57087d935d92fc40391"} err="failed to get container status \"28be62fbfa4581d87a83c58127014f10c5e264869452d57087d935d92fc40391\": rpc error: code = NotFound desc = could not find container \"28be62fbfa4581d87a83c58127014f10c5e264869452d57087d935d92fc40391\": container with ID starting with 28be62fbfa4581d87a83c58127014f10c5e264869452d57087d935d92fc40391 not found: ID does not exist" Feb 03 07:51:56 crc kubenswrapper[4708]: I0203 07:51:56.839840 4708 scope.go:117] "RemoveContainer" containerID="8d806bb36d4c930d67af2e13bcc70c45896c8702ae579367aa1ac935b34d27f1" Feb 03 07:51:56 crc kubenswrapper[4708]: E0203 07:51:56.840257 4708 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d806bb36d4c930d67af2e13bcc70c45896c8702ae579367aa1ac935b34d27f1\": container with ID starting with 8d806bb36d4c930d67af2e13bcc70c45896c8702ae579367aa1ac935b34d27f1 not found: ID does not exist" containerID="8d806bb36d4c930d67af2e13bcc70c45896c8702ae579367aa1ac935b34d27f1" Feb 03 07:51:56 crc kubenswrapper[4708]: I0203 07:51:56.840291 4708 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d806bb36d4c930d67af2e13bcc70c45896c8702ae579367aa1ac935b34d27f1"} err="failed to get container status \"8d806bb36d4c930d67af2e13bcc70c45896c8702ae579367aa1ac935b34d27f1\": rpc error: code = NotFound desc = could not find container \"8d806bb36d4c930d67af2e13bcc70c45896c8702ae579367aa1ac935b34d27f1\": container with ID starting with 8d806bb36d4c930d67af2e13bcc70c45896c8702ae579367aa1ac935b34d27f1 not found: ID does not exist" Feb 03 07:51:57 crc kubenswrapper[4708]: I0203 07:51:57.093082 4708 scope.go:117] "RemoveContainer" containerID="7b0b9b8600998b896bb08a1c5daf1fca59cda92e0baecdb8932c8ddaa8aab8b5" Feb 03 07:51:57 crc kubenswrapper[4708]: E0203 07:51:57.093468 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:51:58 crc kubenswrapper[4708]: I0203 07:51:58.104010 4708 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9" path="/var/lib/kubelet/pods/c0f22b5c-e11d-4fda-b0fc-94cdd9a501c9/volumes" Feb 03 07:52:11 crc kubenswrapper[4708]: I0203 07:52:11.092922 4708 scope.go:117] "RemoveContainer" containerID="7b0b9b8600998b896bb08a1c5daf1fca59cda92e0baecdb8932c8ddaa8aab8b5" Feb 03 07:52:11 crc kubenswrapper[4708]: E0203 07:52:11.094035 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:52:26 crc kubenswrapper[4708]: I0203 07:52:26.096002 4708 scope.go:117] "RemoveContainer" containerID="7b0b9b8600998b896bb08a1c5daf1fca59cda92e0baecdb8932c8ddaa8aab8b5" Feb 03 07:52:26 crc kubenswrapper[4708]: E0203 07:52:26.097222 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:52:38 crc kubenswrapper[4708]: I0203 07:52:38.093034 4708 scope.go:117] "RemoveContainer" containerID="7b0b9b8600998b896bb08a1c5daf1fca59cda92e0baecdb8932c8ddaa8aab8b5" Feb 03 07:52:38 crc kubenswrapper[4708]: E0203 07:52:38.093873 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:52:51 crc kubenswrapper[4708]: I0203 07:52:51.092926 4708 scope.go:117] "RemoveContainer" containerID="7b0b9b8600998b896bb08a1c5daf1fca59cda92e0baecdb8932c8ddaa8aab8b5" Feb 03 07:52:51 crc kubenswrapper[4708]: E0203 07:52:51.093719 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:53:03 crc kubenswrapper[4708]: I0203 07:53:03.093217 4708 scope.go:117] "RemoveContainer" containerID="7b0b9b8600998b896bb08a1c5daf1fca59cda92e0baecdb8932c8ddaa8aab8b5" Feb 03 07:53:03 crc kubenswrapper[4708]: E0203 07:53:03.094122 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:53:17 crc kubenswrapper[4708]: I0203 07:53:17.093620 4708 
scope.go:117] "RemoveContainer" containerID="7b0b9b8600998b896bb08a1c5daf1fca59cda92e0baecdb8932c8ddaa8aab8b5" Feb 03 07:53:17 crc kubenswrapper[4708]: E0203 07:53:17.095262 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:53:29 crc kubenswrapper[4708]: I0203 07:53:29.093172 4708 scope.go:117] "RemoveContainer" containerID="7b0b9b8600998b896bb08a1c5daf1fca59cda92e0baecdb8932c8ddaa8aab8b5" Feb 03 07:53:29 crc kubenswrapper[4708]: E0203 07:53:29.093951 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:53:43 crc kubenswrapper[4708]: I0203 07:53:43.093064 4708 scope.go:117] "RemoveContainer" containerID="7b0b9b8600998b896bb08a1c5daf1fca59cda92e0baecdb8932c8ddaa8aab8b5" Feb 03 07:53:43 crc kubenswrapper[4708]: E0203 07:53:43.094120 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:53:55 crc kubenswrapper[4708]: I0203 07:53:55.093383 4708 scope.go:117] "RemoveContainer" containerID="7b0b9b8600998b896bb08a1c5daf1fca59cda92e0baecdb8932c8ddaa8aab8b5" Feb 03 07:53:55 crc kubenswrapper[4708]: E0203 07:53:55.094192 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:54:06 crc kubenswrapper[4708]: I0203 07:54:06.092869 4708 scope.go:117] "RemoveContainer" containerID="7b0b9b8600998b896bb08a1c5daf1fca59cda92e0baecdb8932c8ddaa8aab8b5" Feb 03 07:54:06 crc kubenswrapper[4708]: E0203 07:54:06.093718 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:54:19 crc kubenswrapper[4708]: I0203 07:54:19.094496 4708 scope.go:117] "RemoveContainer" containerID="7b0b9b8600998b896bb08a1c5daf1fca59cda92e0baecdb8932c8ddaa8aab8b5" Feb 03 07:54:19 crc kubenswrapper[4708]: E0203 07:54:19.095757 4708 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:54:32 crc kubenswrapper[4708]: I0203 07:54:32.104381 4708 scope.go:117] "RemoveContainer" containerID="7b0b9b8600998b896bb08a1c5daf1fca59cda92e0baecdb8932c8ddaa8aab8b5" Feb 03 07:54:32 crc kubenswrapper[4708]: E0203 07:54:32.105429 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:54:44 crc kubenswrapper[4708]: I0203 07:54:44.093877 4708 scope.go:117] "RemoveContainer" containerID="7b0b9b8600998b896bb08a1c5daf1fca59cda92e0baecdb8932c8ddaa8aab8b5" Feb 03 07:54:44 crc kubenswrapper[4708]: E0203 07:54:44.094688 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:54:55 crc kubenswrapper[4708]: I0203 07:54:55.093744 4708 scope.go:117] "RemoveContainer" containerID="7b0b9b8600998b896bb08a1c5daf1fca59cda92e0baecdb8932c8ddaa8aab8b5" Feb 03 07:54:55 crc kubenswrapper[4708]: E0203 07:54:55.094470 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:55:08 crc kubenswrapper[4708]: I0203 07:55:08.093915 4708 scope.go:117] "RemoveContainer" containerID="7b0b9b8600998b896bb08a1c5daf1fca59cda92e0baecdb8932c8ddaa8aab8b5" Feb 03 07:55:08 crc kubenswrapper[4708]: E0203 07:55:08.094777 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:55:22 crc kubenswrapper[4708]: I0203 07:55:22.099245 4708 scope.go:117] "RemoveContainer" containerID="7b0b9b8600998b896bb08a1c5daf1fca59cda92e0baecdb8932c8ddaa8aab8b5" Feb 03 07:55:22 crc kubenswrapper[4708]: E0203 07:55:22.100350 4708 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-r94bn_openshift-machine-config-operator(67498414-5132-496e-9638-189f5941ace0)\"" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" podUID="67498414-5132-496e-9638-189f5941ace0" Feb 03 07:55:35 crc kubenswrapper[4708]: I0203 07:55:35.092928 4708 scope.go:117] "RemoveContainer" containerID="7b0b9b8600998b896bb08a1c5daf1fca59cda92e0baecdb8932c8ddaa8aab8b5" Feb 03 07:55:35 crc kubenswrapper[4708]: I0203 07:55:35.752176 4708 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-r94bn" event={"ID":"67498414-5132-496e-9638-189f5941ace0","Type":"ContainerStarted","Data":"524025a29b6205d7632dbc146c4b0254413d7f31873beae62b870ed9351ae753"} var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515140325030024436 0ustar coreroot  Om77'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015140325030017353 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015140317247016511 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015140317247015461 5ustar corecore